import csv
import datetime
import re
import urllib2
from collections import OrderedDict
from itertools import count

import ckan.plugins as plugins
import ckan.plugins.toolkit as toolkit
import ckan.lib.helpers as helpers
from ckan.lib.plugins import DefaultDatasetForm, DefaultTranslation
from ckan.logic.validators import tag_string_convert
from ckanext.odsh.lib.uploader import ODSHResourceUpload
from pylons import config
from routes.mapper import SubMapper

import helpers as odsh_helpers

_ = toolkit._

import logging
log = logging.getLogger(__name__)

def odsh_get_facet_items_dict(name, limit=None):

    '''Gets all facets like 'get_facet_items_dict', but sorted alphabetically
    instead of by count.
    '''
facets = helpers.get_facet_items_dict(name, limit)
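    # sort alphabetically by display name (case-insensitive); for equal names,
    # higher counts come first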
facets.sort(key=lambda it: (it['display_name'].lower(), -it['count']))
log.info(facets)
return facets
def odsh_main_groups():
'''Return a list of the groups to be shown on the start page.'''
    # Get a list of all the site's groups from CKAN.
groups = toolkit.get_action('group_list')(
data_dict={'all_fields': True})
return groups
def odsh_convert_groups_string(value, context):
    if not value:
        return []
    if type(value) is not list:
        value = [value]
    groups = helpers.groups_available()
    ret = []
    for v in value:
        for g in groups:
            if g['id'] == v:
                ret.append(g)
    return ret
def odsh_now():
    return helpers.render_datetime(datetime.datetime.now(), "%Y-%m-%d")
def odsh_group_id_selected(selected, group_id):
    if type(selected) is not list:
        selected = [selected]
    for g in selected:
        if (isinstance(g, basestring) and group_id == g) or (type(g) is dict and group_id == g['id']):
            return True
    return False
def known_spatial_uri(key, data, errors, context):
mapping_file = config.get('ckanext.odsh.spatial.mapping')
try:
mapping_file = urllib2.urlopen(mapping_file)
except Exception:
raise Exception("Could not load spatial mapping file!")
not_found = True
spatial_text = str()
spatial = str()
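    # assumption: the mapping file is tab-separated, with the spatial URI in the
    # first column and a human-readable label in the second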
cr = csv.reader(mapping_file, delimiter="\t")
for row in cr:
if row[0] == data[key]:
not_found = False
spatial_text = row[1]
break
if not_found:
raise toolkit.Invalid("The specified URI is not known")
# Get the current extras index
current_indexes = [k[1] for k in data.keys()
if len(k) > 1 and k[0] == 'extras']
new_index = max(current_indexes) + 1 if current_indexes else 0
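    # flattened extras are keyed as ('extras', <index>, 'key'|'value');
    # append 'spatial_text' and 'spatial' as two new entries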
data[('extras', new_index, 'key')] = 'spatial_text'
data[('extras', new_index, 'value')] = spatial_text
data[('extras', new_index+1, 'key')] = 'spatial'
data[('extras', new_index+1, 'value')] = spatial
def odsh_tag_name_validator(value, context):
tagname_match = re.compile('[\w \-.\:\(\)]*$', re.UNICODE)
if not tagname_match.match(value):
raise toolkit.Invalid(_('Tag "%s" must be alphanumeric '
'characters or symbols: -_.:()') % (value))
return value
def odsh_tag_string_convert(key, data, errors, context):
'''Takes a list of tags that is a comma-separated string (in data[key])
and parses tag names. These are added to the data dict, enumerated. They
are also validated.'''
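    # illustrative example (assuming no tags are present yet):
    # data[key] == 'traffic, cycling' adds data[('tags', 0, 'name')] = 'traffic'
    # and data[('tags', 1, 'name')] = 'cycling'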
if isinstance(data[key], basestring):
tags = [tag.strip() \
for tag in data[key].split(',') \
if tag.strip()]
else:
tags = data[key]
current_index = max( [int(k[1]) for k in data.keys() if len(k) == 3 and k[0] == 'tags'] + [-1] )
for num, tag in zip(count(current_index+1), tags):
data[('tags', num, 'name')] = tag
for tag in tags:
toolkit.get_validator('tag_length_validator')(tag, context)
odsh_tag_name_validator(tag, context)
class OdshIcapPlugin(plugins.SingletonPlugin):
plugins.implements(plugins.IUploader, inherit=True)
def get_resource_uploader(self, data_dict):
return ODSHResourceUpload(data_dict)
class OdshPlugin(plugins.SingletonPlugin, DefaultTranslation, DefaultDatasetForm):
plugins.implements(plugins.ITemplateHelpers)
plugins.implements(plugins.IRoutes, inherit=True)
plugins.implements(plugins.ITranslation)
plugins.implements(plugins.IFacets)
plugins.implements(plugins.IDatasetForm)
plugins.implements(plugins.IValidators)
    plugins.implements(plugins.IPackageController, inherit=True)
    plugins.implements(plugins.IConfigurer)
    # IConfigurer
def update_config(self, config_):
toolkit.add_template_directory(config_, 'templates')
toolkit.add_public_directory(config_, 'public')
toolkit.add_resource('fanstatic', 'odsh')
def get_helpers(self):
# Template helper function names should begin with the name of the
# extension they belong to, to avoid clashing with functions from
# other extensions.
        return {'odsh_main_groups': odsh_main_groups,
                'odsh_now': odsh_now,
'odsh_group_id_selected': odsh_group_id_selected,
'odsh_get_facet_items_dict': odsh_get_facet_items_dict,
'odsh_openness_score_dataset_html': odsh_helpers.odsh_openness_score_dataset_html,
'odsh_get_resource_details': odsh_helpers.odsh_get_resource_details,
'odsh_get_resource_views': odsh_helpers.odsh_get_resource_views,
'odsh_get_bounding_box': odsh_helpers.odsh_get_bounding_box,
'odsh_render_datetime': odsh_helpers.odsh_render_datetime,
                'odsh_upload_known_formats': odsh_helpers.odsh_upload_known_formats
                }
def before_map(self, map):
map.connect('info_page', '/info_page', controller='ckanext.odsh.controller:OdshRouteController', action='info_page')
# redirect all user routes to custom controller
with SubMapper(map, controller='ckanext.odsh.controller:OdshUserController') as m:
m.connect('/user/edit', action='edit')
m.connect('user_generate_apikey', '/user/generate_key/{id}', action='generate_apikey')
m.connect('/user/activity/{id}/{offset}', action='activity')
m.connect('user_activity_stream', '/user/activity/{id}',
action='activity', ckan_icon='clock-o')
m.connect('user_dashboard', '/dashboard', action='dashboard',
ckan_icon='list')
m.connect('user_dashboard_datasets', '/dashboard/datasets',
action='dashboard_datasets', ckan_icon='sitemap')
m.connect('user_dashboard_groups', '/dashboard/groups',
action='dashboard_groups', ckan_icon='users')
m.connect('user_dashboard_organizations', '/dashboard/organizations',
action='dashboard_organizations', ckan_icon='building-o')
m.connect('/dashboard/{offset}', action='dashboard')
m.connect('user_follow', '/user/follow/{id}', action='follow')
m.connect('/user/unfollow/{id}', action='unfollow')
m.connect('user_followers', '/user/followers/{id:.*}',
action='followers', ckan_icon='users')
m.connect('user_edit', '/user/edit/{id:.*}', action='edit',
ckan_icon='cog')
m.connect('user_delete', '/user/delete/{id}', action='delete')
m.connect('/user/reset/{id:.*}', action='perform_reset')
m.connect('register', '/user/register', action='register')
m.connect('login', '/user/login', action='login')
m.connect('/user/_logout', action='logout')
m.connect('/user/logged_in', action='logged_in')
m.connect('/user/logged_out', action='logged_out')
m.connect('/user/logged_out_redirect', action='logged_out_page')
m.connect('/user/reset', action='request_reset')
m.connect('/user/me', action='me')
m.connect('/user/set_lang/{lang}', action='set_lang')
m.connect('user_datasets', '/user/{id:.*}', action='read',
ckan_icon='sitemap')
m.connect('user_index', '/user', action='index')
def dataset_facets(self, facets_dict, package_type):

        # TODO: question from Pascal, 2018-10-12: why does the order here appear
        # exactly reversed (back to front)?
        # Christian: it probably is not; CKAN just re-sorts it somehow.
        # Note: OrderedDict only preserves order when built from a sequence of
        # tuples; a plain dict literal loses the insertion order.
        return OrderedDict([('organization', _('Herausgeber')),
                            ('res_format', _('Dateiformat')),
                            ('license_title', _('Lizenz')),
                            ('groups', _('Kategorie'))])
    def organization_facets(self, facets_dict, organization_type, package_type):
        return OrderedDict([('organization', _('Herausgeber')),
                            ('res_format', _('Dateiformat')),
                            ('license_title', _('Lizenz')),
                            ('groups', _('Kategorie'))])
    def group_facets(self, facets_dict, group_type, package_type):
        return OrderedDict([('organization', _('Herausgeber')),
                            ('res_format', _('Dateiformat')),
                            ('license_title', _('Lizenz')),
                            ('groups', _('Kategorie'))])
    def _fields(self):
        return ['title', 'notes']
    def _extraFields(self):
        return ['issued', 'temporal_start', 'temporal_end', 'spatial_uri', 'licenseAttributionByText']
    def _update_schema(self, schema):
        for field in self._extraFields():
            if field == 'licenseAttributionByText':
                schema.update({field: [
                    toolkit.get_validator('ignore_missing'),
                    toolkit.get_converter('convert_to_extras')]})
            elif field == 'spatial_uri':
                schema.update({field: [
                    toolkit.get_converter('not_empty'),
                    toolkit.get_converter('convert_to_extras')]})
            else:
                schema.update({field: [
                    toolkit.get_converter('not_empty'),
                    toolkit.get_converter('convert_to_extras')]})
        # require the plain dataset fields (title, notes) to be non-empty
        for field in self._fields():
            schema.update({field: [toolkit.get_converter('not_empty')]})
        # swap CKAN's default tag validators for the ODSH-specific ones
        for i, item in enumerate(schema['tags']['name']):
            if item == toolkit.get_validator('tag_name_validator'):
                schema['tags']['name'][i] = toolkit.get_validator('odsh_tag_name_validator')
        for i, item in enumerate(schema['tag_string']):
            if item == tag_string_convert:
                schema['tag_string'][i] = odsh_tag_string_convert
        # resources must provide at least a URL and a format
        schema['resources'].update({
            'url': [toolkit.get_converter('not_empty')],
            'format': [toolkit.get_converter('not_empty')]
            })
def create_package_schema(self):
schema = super(OdshPlugin, self).create_package_schema()
self._update_schema(schema)
return schema
def update_package_schema(self):
schema = super(OdshPlugin, self).update_package_schema()
self._update_schema(schema)
return schema
def show_package_schema(self):
schema = super(OdshPlugin, self).show_package_schema()
for field in self._extraFields():
schema.update({
field : [toolkit.get_converter('convert_from_extras')]
})
return schema
def is_fallback(self):
# Return True to register this plugin as the default handler for
# package types not handled by any other IDatasetForm plugin.
return True
def package_types(self):
# This plugin doesn't handle any special package types, it just
# registers itself as the default (above).
return []
    def get_validators(self):
        return {'odsh_convert_groups_string': odsh_convert_groups_string,
                'known_spatial_uri': known_spatial_uri,
                'odsh_tag_name_validator': odsh_tag_name_validator}
def extend_search_convert_local_to_utc_timestamp(self, str_timestamp):
DATETIME_FORMAT = '%Y-%m-%d'
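        # e.g. a local date '2018-10-12' from the search form becomes '2018-10-12T00:00:00Z'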
if not str_timestamp:
return ''
        # TODO: do we need timezone conversions?
        local_datetime = datetime.datetime.strptime(str_timestamp, DATETIME_FORMAT)
# tz_code = config.get('ckan.timezone', 'Australia/Melbourne')
# local = timezone(tz_code)
# utc_datetime = _make_aware(local_datetime, local)
# local_datetime = utc_datetime.astimezone(pytz.utc)
return local_datetime.strftime(DATETIME_FORMAT)+"T00:00:00Z"
# Add the custom parameters to Solr's facet queries
def before_search(self, search_params):
extras = search_params.get('extras')
if not extras:
# There are no extras in the search params, so do nothing.
return search_params
print(search_params)
start_date = self.extend_search_convert_local_to_utc_timestamp(extras.get('ext_startdate'))
end_date = self.extend_search_convert_local_to_utc_timestamp(extras.get('ext_enddate'))
if not start_date and not end_date:
return search_params
if not start_date:
start_date='*'
if not end_date:
end_date='*'
fq = search_params['fq']
fq = '{fq} +extras_temporal_start:[{start_date} TO {end_date}] OR +extras_temporal_end:[{start_date} TO {end_date}]'.format(fq=fq, start_date=start_date, end_date=end_date)
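        # illustrative example: with only a start date this appends
        # '+extras_temporal_start:[2019-01-01T00:00:00Z TO *] OR +extras_temporal_end:[2019-01-01T00:00:00Z TO *]'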
print(fq)
        # hand the modified filter query back to the search params
search_params['fq'] = fq
return search_params