Commit 3b841e19 authored by anonymous

ODPSH-408

parent c68677fe
@@ -392,6 +392,9 @@ class OdshPlugin(plugins.SingletonPlugin, DefaultTranslation, DefaultDatasetForm
        })
        schema.update({'__extras': [toolkit.get_converter('odsh_validate_extras')] })

        ## only to make sure the spatial field is there for validation
        # schema.update({'spatial': [toolkit.get_converter('convert_from_extras')]})

    def create_package_schema(self):
        schema = super(OdshPlugin, self).create_package_schema()
        self._update_schema(schema)
...
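For context, the '__extras' entry above only resolves if a converter named 'odsh_validate_extras' is registered. A minimal sketch of such a registration, assuming it reuses the module-level get_validators() from ckanext.odsh.validation (exercised by the unit tests further down) and that toolkit.get_converter() looks names up in the same registry as toolkit.get_validator():

import ckan.plugins as plugins
from ckanext.odsh import validation


class OdshValidatorsSketch(plugins.SingletonPlugin):
    # Sketch only, not part of this commit: expose the odsh validators so
    # toolkit.get_converter('odsh_validate_extras') can find them.
    plugins.implements(plugins.IValidators)

    def get_validators(self):
        return validation.get_validators()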
@@ -191,6 +191,7 @@ is_required=true,placeholder=_('Enter title')) }}
<!-- field spatial_uri -->
{{errors}}
{% set field = 'spatial_uri' %}
{% set value = h.odsh_extract_value_from_extras(data.extras,field) %}
{% set error = h.odsh_extract_error(field, errors) %}
@@ -210,13 +211,14 @@ is_required=true,placeholder=_('Enter title')) }}
</div>
</div>
{#
{% set spatial_extends_available = h.odsh_spatial_extends_available() %}
<div class="control-group">
<label class="control-label" for="field-{{field}}">{{_('Spatial uri')}}: <span title="Dieses Feld ist erforderlich" class="control-required">*</span> </label>
<div class="controls">
<div class="row-fluid">
<div class="span6">
<select id="field-{{field}}" data-module="autocomplete">
<select id="field-{{field}}" data-module="autocomplete" data-module-items="10">
{% for extend in spatial_extends_available%}
<option value="{{ extend }}" {% if selected_extend %} selected="selected" {% endif %}>{{extend}}</option>
{% endfor %}
@@ -225,6 +227,7 @@ is_required=true,placeholder=_('Enter title')) }}
</div>
</div>
</div>
#}
<!-- field private -->
<div class="control-group">
...
@@ -9,6 +9,7 @@ from routes import url_for
from nose.tools import assert_true, assert_false, assert_equal, assert_in
from ckanext.odsh.helpers import odsh_create_checksum
webtest_submit = helpers.webtest_submit
import pdb


class TestUpload(helpers.FunctionalTestBase):
@@ -43,6 +44,60 @@ class TestUpload(helpers.FunctionalTestBase):
        # assert
        response.mustcontain('spatial_uri: uri unknown')
    @odsh_test()
    def test_upload_empty_spatial_uri(self):
        # arrange
        form = self._get_package_new_form()

        # act
        form[self._get_field_name('spatial_uri')] = ''
        response = self._submit_form(form)

        # assert
        assert 'spatial_uri: empty not allowed' in response

    @odsh_test()
    def test_edit_empty_spatial_uri(self):
        # arrange
        dataset = self._create_dataset()
        form = self._get_package_update_form(dataset['id'])

        # act
        form[self._get_field_name('spatial_uri')] = ''
        response = self._submit_form(form)

        # assert
        assert 'spatial_uri: empty not allowed' in response

    @odsh_test()
    def test_edit_empty_spatial_uri_but_spatial(self):
        # arrange
        extras = [
            {'key': 'temporal_start', 'value': '2000-01-27'},
            {'key': 'temporal_end', 'value': '2000-01-27'},
            {'key': 'issued', 'value': '2000-01-27'},
            {'key': 'groups', 'value': 'soci'},
            {'key': 'licenseAttributionByText', 'value': 'text'},
            {'key': 'spatial_uri', 'value': ''},
            {'key': 'spatial', 'value': '{"type": "Point", "coordinates": [9.511769, 53.928028]}'},
        ]
        dataset = self._create_dataset(extras=extras)
        # pdb.set_trace()
        form = self._get_package_update_form(dataset['id'])

        # # act
        # form[self._get_field_name('spatial_uri')] = ''
        # response = self._submit_form(form)

        # # assert
        # assert 'spatial_uri: empty not allowed' not in response

        response = self._submit_and_follow_form(form)

        # assert
        response.mustcontain('Manage Dataset')
    @odsh_test()
    def test_upload_empty_wrong_date_temporal_start(self):
        # arrange
@@ -84,6 +139,7 @@ class TestUpload(helpers.FunctionalTestBase):
            'spatial_uri')] = 'http://dcat-ap.de/def/politicalGeocoding/districtKey/01001'
        form[self._get_field_name('issued')] = '2019-01-29'
        form[self._get_field_name('temporal_start')] = '2019-01-29'
        form[self._get_field_name('groups')] = 'soci'
        form[self._get_field_name('temporal_end')] = '2019-02-02'
        form['license_id'] = 'http://dcat-ap.de/def/licenses/dl-by-de/2.0'
        form[self._get_field_name('licenseAttributionByText')].value = 'text'
@@ -136,3 +192,36 @@ class TestUpload(helpers.FunctionalTestBase):
            extra_environ=self.env,
        )
        return response.forms['dataset-edit']
    def _get_package_update_form(self, id):
        app = self._get_test_app()
        # user = factories.User()
        response = app.get(
            url=url_for(controller='package', action='edit', id=id),
            extra_environ=self.env,
        )
        return response.forms['dataset-edit']

    def _create_dataset(self, name='my-own-dataset', temporal_start='2000-01-27', temporal_end='2000-01-27', title='title', extras=None):
        user = factories.User()
        self.org = factories.Organization(
            name="my-org",
            users=[{'name': user['id'], 'capacity': 'admin'}]
        )
        self.env = {'REMOTE_USER': user['name'].encode('ascii')}
        if not extras:
            extras = [
                {'key': 'temporal_start', 'value': temporal_start},
                {'key': 'temporal_end', 'value': temporal_end},
                {'key': 'issued', 'value': '2000-01-27'},
                {'key': 'spatial_uri', 'value': 'http://dcat-ap.de/def/politicalGeocoding/districtKey/01001'},
                {'key': 'groups', 'value': 'soci'},
                {'key': 'licenseAttributionByText', 'value': 'text'}
            ]
        return factories.Dataset(user=user,
                                 name=name,
                                 title=title,
                                 issued='27-01-2000',
                                 extras=extras,
                                 license_id='http://dcat-ap.de/def/licenses/dl-by-de/2.0')

from ckanext.odsh.validation import *
import ckan.plugins.toolkit as toolkit
import pylons
import ckan.model as modelMock
import sys
import json
from nose.tools import *
@@ -7,12 +11,18 @@ from mock import MagicMock, Mock, patch
def mockInvalid(*args, **kwargs):
    return Exception(*args, **kwargs)


def mock_(s):
    return s


m = MagicMock()


class MissingMock:
    pass


m.Missing = MissingMock

sys.modules['ckan'] = MagicMock()
@@ -24,21 +34,16 @@ sys.modules['ckan.lib.navl'] = MagicMock()
sys.modules['ckan.lib.navl.dictization_functions'] = m
sys.modules['pylons'] = MagicMock()
import ckan.model as modelMock
import pylons
import ckan.plugins.toolkit as toolkit
toolkit.Invalid = mockInvalid
toolkit._ = mock_
from ckanext.odsh.validation import *
def test_get_validators():
    assert get_validators()


# @patch('toolkit.get_validator', side_effect=lambda a: None)
def test_tag_string_convert():
    # arrange
    data = {'tag_string': 'tag1,tag2'}
@@ -74,6 +79,28 @@ def test_known_spatial_uri(url_mock, get_mock, csv_mock):
    assert data[('extras', 2, 'value')] == '0'
@raises(Exception)
@patch('urllib2.urlopen')
@patch('pylons.config.get', side_effect='foo')
@patch('csv.reader', side_effect=[[['uri', 'text', json.dumps({"geometry": 0})]]])
def test_known_spatial_uri_without_uri(url_mock, get_mock, csv_mock):
    # arrange
    data = {('extras', 0, 'key'): 'spatial_uri',
            ('extras', 0, 'value'): ''}

    # act
    known_spatial_uri('spatial_uri', data, {}, None)


def test_known_spatial_uri_without_uri_with_spatial():
    # arrange
    data = {('extras', 0, 'key'): 'spatial',
            ('extras', 0, 'value'): 'value',
            ('extras', 1, 'key'): 'spatial_uri',
            ('extras', 1, 'value'): ''}

    # act
    known_spatial_uri('spatial_uri', data, {}, None)
def test_validate_licenseAttributionByText():
    # arrange
    def get_licenses():
...
# This Python file uses the following encoding: utf-8
import logging
import csv
import re
import urllib2
@@ -12,11 +13,13 @@ from ckan.lib.navl.dictization_functions import Missing
from pylons import config

import pdb

_ = toolkit._

import logging

log = logging.getLogger(__name__)
def _extract_value(data, field):
    key = None
    for k in data.keys():
@@ -27,6 +30,7 @@ def _extract_value(data, field):
        return None
    return data[(key[0], key[1], 'value')]
def validate_extra_groups(data, requireAtLeastOne, errors):
    value = _extract_value(data, 'groups')
    if value != None:
@@ -56,19 +60,21 @@ def validate_extra_groups(data, requireAtLeastOne, errors):
            not data.get(('groups', 0, 'name'), False):
        errors['groups'] = 'at least one group needed'
def validate_extras(key, data, errors, context):
    extra_errors = {}
    isStaNord = ('id',) in data and data[('id',)][:7] == 'StaNord'

    validate_extra_groups(data, True, extra_errors)
    validate_extra_date_new(key, 'issued', data, isStaNord, extra_errors)
    validate_extra_date_new(key, 'temporal_start',
                            data, isStaNord, extra_errors)
    validate_extra_date_new(key, 'temporal_end', data, True, extra_errors)

    if len(extra_errors.values()):
        raise toolkit.Invalid(extra_errors)
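For orientation only, a small self-contained illustration of the pattern used above (field names and messages are examples borrowed from this module, not output of this commit):

import ckan.plugins.toolkit as toolkit

# Each check fills a plain dict keyed by field name ...
extra_errors = {'groups': 'at least one group needed',
                'temporal_end': 'not a valid date'}

# ... and a single toolkit.Invalid carries every message back to the form.
try:
    if len(extra_errors.values()):
        raise toolkit.Invalid(extra_errors)
except Exception as err:
    print(err)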
def _set_value(data, field, value):
    key = None
    for k in data.keys():
@@ -79,6 +85,7 @@ def _set_value(data, field, value):
        return None
    data[(key[0], key[1], 'value')] = value
def validate_extra_date_new(key, field, data, optional, errors):
    value = _extract_value(data, field)
@@ -96,6 +103,7 @@ def validate_extra_date_new(key, field, data, optional, errors):
            pass
        errors[field] = 'not a valid date'


def validate_licenseAttributionByText(key, data, errors, context):
    register = model.Package.get_license_register()
    isByLicense = False
@@ -136,9 +144,21 @@ def known_spatial_uri(key, data, errors, context):
    value = _extract_value(data, 'spatial_uri')

    if not value:
        poly = None
        # some harvesters might import a polygon directly...
        # pdb.set_trace()
        poly = _extract_value(data, 'spatial')

        has_old_uri = False
        pkg = context.get('package', None)
        if pkg:
            old_uri = pkg.extras.get('spatial_uri', None)
            has_old_uri = old_uri != None and len(old_uri) > 0
            if not poly:
                poly = pkg.extras.get('spatial', None)
        if not poly or has_old_uri:
            # pdb.set_trace()
            raise toolkit.Invalid('spatial_uri: empty not allowed')
        else:
            return
@@ -164,17 +184,21 @@ def known_spatial_uri(key, data, errors, context):
        raise toolkit.Invalid(
            'spatial_uri: uri unknown')

    # Get the current extras index
    new_index = next_extra_index(data)

    data[('extras', new_index, 'key')] = 'spatial_text'
    data[('extras', new_index, 'value')] = spatial_text
    data[('extras', new_index+1, 'key')] = 'spatial'
    data[('extras', new_index+1, 'value')] = spatial
def next_extra_index(data):
    current_indexes = [k[1] for k in data.keys()
                       if len(k) > 1 and k[0] == 'extras']
    return max(current_indexes) + 1 if current_indexes else 0
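As a quick standalone illustration of what this helper operates on (the sample entries below are made up): validators see a package's extras as a flattened dict keyed by ('extras', index, 'key'/'value') tuples, and next_extra_index returns the first unused index. The helper is repeated here only so the snippet runs on its own.

def next_extra_index(data):
    current_indexes = [k[1] for k in data.keys()
                       if len(k) > 1 and k[0] == 'extras']
    return max(current_indexes) + 1 if current_indexes else 0


# Hypothetical flattened form data with two existing extras.
data = {
    ('extras', 0, 'key'): 'spatial_uri',
    ('extras', 0, 'value'): '',
    ('extras', 1, 'key'): 'issued',
    ('extras', 1, 'value'): '2019-01-29',
}

assert next_extra_index(data) == 2   # next free slot, e.g. for 'spatial_text'
assert next_extra_index({}) == 0     # no extras yet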
def tag_name_validator(value, context):
    tagname_match = re.compile('[\w \-.\:\(\)\´\`]*$', re.UNICODE)
    if not tagname_match.match(value):
@@ -182,6 +206,7 @@ def tag_name_validator(value, context):
            'characters or symbols: -_.:()') % (value))
    return value
def tag_string_convert(key, data, errors, context):
    '''Takes a list of tags that is a comma-separated string (in data[key])
    and parses tag names. These are added to the data dict, enumerated. They
@@ -193,7 +218,6 @@ def tag_string_convert(key, data, errors, context):
    else:
        tags = data[key]

    current_index = max([int(k[1]) for k in data.keys()
                         if len(k) == 3 and k[0] == 'tags'] + [-1])
...