Skip to content
Snippets Groups Projects
Commit fe9b057f authored by anonymous's avatar anonymous
Browse files

merge

parents a866396e c3157bf5
Branches
Tags
No related merge requests found
<VirtualHost *:80>
    WSGIScriptAlias / /etc/ckan/default/apache.wsgi

    # Pass authorization info on (needed for rest api).
    WSGIPassAuthorization On

    # Deploy as a daemon (avoids conflicts between CKAN instances).
    WSGIDaemonProcess ckan_default display-name=ckan_default processes=2 threads=15
    WSGIProcessGroup ckan_default

    # Whitelist approach: each RewriteCond below EXCLUDES one URI pattern
    # from the catch-all rule further down; any request matching none of
    # these exceptions is proxied to /notfound.
    RewriteEngine On
    #resources
    # Dot escaped so only real ".css" suffixes are excluded (an unescaped
    # "." matched any character).
    RewriteCond %{REQUEST_URI} !^/.*\.css
    RewriteCond %{REQUEST_URI} !^/base/images/
    RewriteCond %{REQUEST_URI} !^/base/fonts/
    RewriteCond %{REQUEST_URI} !^/fanstatic/
    RewriteCond %{REQUEST_URI} !^/scripts/vendor/
    RewriteCond %{REQUEST_URI} !^/uploads/group/
    #api
    RewriteCond %{REQUEST_URI} !^/api/i18n/de$
    RewriteCond %{REQUEST_URI} !^/api/2/.*$
    RewriteCond %{REQUEST_URI} !^/api/3/action/package.*$
    RewriteCond %{REQUEST_URI} !^/catalog\.xml[^/]*$
    #user
    RewriteCond %{REQUEST_URI} !^/user/login$
    RewriteCond %{REQUEST_URI} !^/user/logged_out_redirect$
    RewriteCond %{REQUEST_URI} !^/user/reset$
    RewriteCond %{REQUEST_URI} !^/user/edit$
    RewriteCond %{REQUEST_URI} !^/user/register$
    RewriteCond %{REQUEST_URI} !^/user/\w+$
    RewriteCond %{REQUEST_URI} !^/user/$
    # NOTE(review): the trailing "?" makes the preceding character optional;
    # presumably these are meant to match with or without a query suffix --
    # confirm the intent.
    RewriteCond %{REQUEST_URI} !^/login_generic?
    RewriteCond %{REQUEST_URI} !^/logged_in?
    #report
    RewriteCond %{REQUEST_URI} !^/report$
    RewriteCond %{REQUEST_URI} !^/report/openness$
    RewriteCond %{REQUEST_URI} !^/report/openness/[^/]*$
    #organization
    RewriteCond %{REQUEST_URI} !^/organization$
    RewriteCond %{REQUEST_URI} !^/organization/new$
    RewriteCond %{REQUEST_URI} !^/organization?__no_cache__=True$
    RewriteCond %{REQUEST_URI} !^/organization/[^/]*$
    RewriteCond %{REQUEST_URI} !^/organization/edit/[^/]*$
    RewriteCond %{REQUEST_URI} !^/organization/delete/[^/]*$
    RewriteCond %{REQUEST_URI} !^/organization/members/[^/]*$
    RewriteCond %{REQUEST_URI} !^/organization/member_new/[^/]*$
    RewriteCond %{REQUEST_URI} !^/organization/member_delete/[^/]*$
    #dataset
    RewriteCond %{REQUEST_URI} !^/dataset$
    RewriteCond %{REQUEST_URI} !^/dataset/[^/]*$
    RewriteCond %{REQUEST_URI} !^/dataset/new_resource/[^/]*$
    RewriteCond %{REQUEST_URI} !^/dataset/edit/[^/]*$
    RewriteCond %{REQUEST_URI} !^/dataset/[^/]+/resource/[^/]+$
    RewriteCond %{REQUEST_URI} !^/dataset/[^/]+/resource_edit/[^/]*$
    RewriteCond %{REQUEST_URI} !^/dataset/[^/]+/resource_data/[^/]+$
    RewriteCond %{REQUEST_URI} !^/dataset/[^/]+/resource_delete/[^/]*$
    RewriteCond %{REQUEST_URI} !^/dataset/[^/]+/resource/[^/]+/download/[^/]+$
    RewriteCond %{REQUEST_URI} !^/dataset/[^/]+/resource/[^/]+/edit_view/[^/]+$
    RewriteCond %{REQUEST_URI} !^/dataset/delete/[^/]+$
    #tag
    RewriteCond %{REQUEST_URI} !^/tag/[^/]*$
    #harvest
    RewriteCond %{REQUEST_URI} !^/harvest.*$
    #feed
    RewriteCond %{REQUEST_URI} !^/feeds/custom\.atom[^/]*$
    #other
    RewriteCond %{REQUEST_URI} !^/$
    RewriteCond %{REQUEST_URI} !^/info_page$
    #
    RewriteCond %{REQUEST_URI} !^/notfound$
    #block if no match
    RewriteRule (.*) /notfound [P,L,NE]

    # Explicitly block the personal user endpoints, which would otherwise be
    # allowed by the /user/\w+ exception above.
    RewriteCond %{REQUEST_URI} ^/user/dashboard [OR]
    RewriteCond %{REQUEST_URI} ^/user/me
    RewriteRule (.*) /notfound [P,L,NE]

    ErrorLog /var/log/apache2/ckan_default.error.log
    CustomLog /var/log/apache2/ckan_default.custom.log combined
    # NOTE(review): rewrite:trace3 produces very verbose rewrite tracing --
    # consider lowering this for production.
    LogLevel alert rewrite:trace3 alias:debug

    <IfModule mod_rpaf.c>
        RPAFenable On
        RPAFsethostname On
        RPAFproxy_ips 127.0.0.1
    </IfModule>

    <Directory />
        Require all granted
    </Directory>

    # Write operations are proxied to the master instance. Replace the
    # placeholders with the master's internal IP before deployment.
    # (Historical commented-out rules with hard-coded IPs were removed;
    # duplicate ProxyPassReverse lines were deduplicated.)
    # NOTE(review): two placeholder spellings are used below, <master-ip>
    # and <interne-IP-Master> -- presumably the same host; unify when
    # filling in.
    ProxyPass /dataset/new http://<master-ip>/dataset/new
    ProxyPassReverse /dataset/new http://<master-ip>/dataset/new
    ProxyPassMatch ^/(dataset/delete/[^/]+)$ http://<master-ip>/$1
    ProxyPassReverse ^/(dataset/delete/[^/]+)$ http://<master-ip>/$1
    ProxyPassMatch ^/(dataset/edit/[^/]+)$ http://<master-ip>/$1
    ProxyPassReverse ^/(dataset/edit/[^/]+)$ http://<master-ip>/$1
    ProxyPassReverse /dataset http://<master-ip>/dataset
    ProxyPass /dataset/new_resource http://<master-ip>/dataset/new_resource
    ProxyPassReverse /dataset/new_resource http://<master-ip>/dataset/new_resource
    ProxyPassReverse ^/uploads/group/(.*)$ http://<master-ip>/uploads/group/$1
    ProxyPassMatch ^/uploads/group/(.*)$ http://<master-ip>/uploads/group/$1
    ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+/download/[^/]+)$ http://<master-ip>/$1
    ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/download/[^/]+)$ http://<master-ip>/$1
    ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+)$ http://<master-ip>/$1
    # NOTE(review): the Match pattern requires a trailing slash while the
    # Reverse above does not -- confirm this asymmetry is intended.
    ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/)$ http://<master-ip>/$1
    ProxyPassMatch ^/(dataset/[^/]+/resource_data/[^/]+)$ http://<master-ip>/$1
    ProxyPassReverse ^/(dataset/[^/]+/resource_data/[^/]+)$ http://<master-ip>/$1
    ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/new_view[^/]*)$ http://<master-ip>/$1
    ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+/new_view[^/]*)$ http://<master-ip>/$1
    ProxyPassReverse ^/(harvest.*)$ http://<master-ip>/$1
    ProxyPassMatch ^/(harvest.*)$ http://<master-ip>/$1
    ProxyPassReverse ^/(api/3/action/package.*)$ http://<master-ip>/$1
    ProxyPassMatch ^/(api/3/action/package.*)$ http://<master-ip>/$1
    ProxyPassMatch ^/(organization/edit/[^/]+)$ http://<master-ip>/$1
    ProxyPassReverse ^/(organization/edit/[^/]+)$ http://<master-ip>/$1
    ProxyPass /organization/new http://<interne-IP-Master>/organization/new
    ProxyPassReverse /organization/new http://<interne-IP-Master>/organization/new
</VirtualHost>
\ No newline at end of file
......@@ -145,6 +145,12 @@ class OdshApiController(ApiController):
def action(self, logic_function, ver=None):
if logic_function == 'resource_qv4yAI2rgotamXGk98gJ':
return helpers.odsh_get_version_id()
if logic_function == 'resourcelog_qv4yAI2rgotamXGk98gJ':
if config.get('ckanext.odsh.enabletestendpoints', None) == 'True':
log.info('This is an info test log')
log.warning('This is an warning test log')
log.error('This is an error test log')
return 'ok'
try:
function = logic.get_action(logic_function)
side_effect_free = getattr(function, 'side_effect_free', False)
......
......@@ -260,3 +260,9 @@ def odsh_get_version_id():
def odsh_show_testbanner():
    """Tell templates whether the test-system banner should be rendered.

    True only when the config option 'ckanext.odsh.showtestbanner' holds the
    string 'True'; any other value, or an unset option, yields False.
    """
    flag = config.get('ckanext.odsh.showtestbanner', 'False')
    return flag == 'True'
def odsh_is_slave():
    """Map the 'ckanext.odsh.slave' config option to a tri-state integer.

    Returns 1 when the option is 'True', 0 when it is 'False', and -1 when
    it is missing or holds any other value.
    """
    value = config.get('ckanext.odsh.slave', None)
    if value not in ('True', 'False'):
        # Unset or malformed configuration -> undecided.
        return -1
    return 1 if value == 'True' else 0
......@@ -293,7 +293,8 @@ class OdshPlugin(plugins.SingletonPlugin, DefaultTranslation, DefaultDatasetForm
'odsh_spatial_extends_available': odsh_helpers.spatial_extends_available,
'odsh_public_resource_url': odsh_helpers.odsh_public_resource_url,
'odsh_get_version_id': odsh_helpers.odsh_get_version_id,
'odsh_show_testbanner': odsh_helpers.odsh_show_testbanner
'odsh_show_testbanner': odsh_helpers.odsh_show_testbanner,
'odsh_is_slave': odsh_helpers.odsh_is_slave
}
def after_map(self, map):
......
......@@ -20,6 +20,7 @@ log = logging.getLogger(__name__)
DCT = rdflib.namespace.Namespace("http://purl.org/dc/terms/")
DCAT = rdflib.namespace.Namespace("http://www.w3.org/ns/dcat#")
class ODSHEuropeanDCATAPProfile(EuropeanDCATAPProfile):
def _license(self, dataset_ref):
......@@ -48,17 +49,20 @@ class ODSHEuropeanDCATAPProfile(EuropeanDCATAPProfile):
return ''
def _distribution_format(self, distribution, normalize_ckan_format=True):
imt, label = super(ODSHEuropeanDCATAPProfile,self)._distribution_format(distribution, normalize_ckan_format)
imt, label = super(ODSHEuropeanDCATAPProfile, self)._distribution_format(
distribution, normalize_ckan_format)
if label in resource_formats_import():
label = resource_formats_import()[label]
return imt, label
def graph_from_dataset(self, dataset_dict, dataset_ref):
super(ODSHEuropeanDCATAPProfile,self).graph_from_dataset(dataset_dict, dataset_ref)
super(ODSHEuropeanDCATAPProfile, self).graph_from_dataset(
dataset_dict, dataset_ref)
for s, p, o in self.g.triples((None, rdflib.RDF.type, DCAT.Distribution)):
for s2, p2, o2 in self.g.triples((s, DCT['format'], None)):
if o2.decode() in resource_formats_export():
self.g.set((s, DCT['format'], rdflib.URIRef(resource_formats_export()[o2.decode()])))
self.g.set((s, DCT['format'], rdflib.URIRef(
resource_formats_export()[o2.decode()])))
for s, p, o in self.g.triples((None, DCT.language, None)):
if o.decode() in get_language():
self.g.set((s, p, rdflib.URIRef(get_language()[o.decode()])))
......@@ -71,32 +75,51 @@ class ODSHEuropeanDCATAPProfile(EuropeanDCATAPProfile):
for dist in self.g.objects(dataset_ref, DCAT.distribution):
self.g.add((dist, DCT.license, rdflib.URIRef(license)))
class ODSHDCATdeProfile(DCATdeProfile):
def parse_dataset(self, dataset_dict, dataset_ref):
dataset_dict = super(ODSHDCATdeProfile,self).parse_dataset(dataset_dict, dataset_ref)
dataset_dict = super(ODSHDCATdeProfile, self).parse_dataset(
dataset_dict, dataset_ref)
# Enhance Distributions
for distribution in self.g.objects(dataset_ref, DCAT.distribution):
for resource_dict in dataset_dict.get('resources', []):
# Match distribution in graph and distribution in ckan-dict
if unicode(distribution) == resource_uri(resource_dict):
for namespace in [DCATDE, DCATDE_1_0]:
value = self._object_value(distribution, namespace.licenseAttributionByText)
value = self._object_value(
distribution, namespace.licenseAttributionByText)
if value:
ds_utils.insert_new_extras_field(dataset_dict, 'licenseAttributionByText', value)
ds_utils.insert_new_extras_field(
dataset_dict, 'licenseAttributionByText', value)
return dataset_dict
return dataset_dict
def graph_from_dataset(self, dataset_dict, dataset_ref):
super(ODSHDCATdeProfile,self).graph_from_dataset(dataset_dict, dataset_ref)
super(ODSHDCATdeProfile, self).graph_from_dataset(
dataset_dict, dataset_ref)
# Enhance Distributions
# <dcatde:contributorID rdf:resource="http://dcat-ap.de/def/contributors/schleswigHolstein"/>
self.g.add((dataset_ref, DCATDE.contributorID, rdflib.URIRef("http://dcat-ap.de/def/contributors/schleswigHolstein")))
self.g.add((dataset_ref, DCATDE.contributorID, rdflib.URIRef(
"http://dcat-ap.de/def/contributors/schleswigHolstein")))
extras = dataset_dict.get('extras', None)
if extras:
attr = None
for d in extras:
if d['key'] == 'licenseAttributionByText':
attr = d['value']
break
if attr:
self.g.set(
(dataset_ref, DCATDE.licenseAttributionByText, rdflib.Literal(attr)))
for dist in self.g.objects(dataset_ref, DCAT.distribution):
self.g.set(
(dist, DCATDE.licenseAttributionByText, rdflib.Literal(attr)))
_RESOURCE_FORMATS_IMPORT = None
_RESOURCE_FORMATS_EXPORT = None
def resource_formats():
global _RESOURCE_FORMATS_IMPORT
global _RESOURCE_FORMATS_EXPORT
......@@ -105,33 +128,8 @@ def resource_formats():
g = rdflib.Graph()
# Something went wrong with trying to get the file formats online, try to use backup instead
try:
fallback_filepath = config.get('ckan.odsh.resource_formats_fallback_filepath')
# if not fallback_filepath:
# log.warning("Could not find config setting: 'ckan.odsh.resource_formats_fallback_filepath', using fallback instead.")
# fallback_filepath = "/tmp/fileformats.rdf"
# format_european_url = config.get('ckan.odsh.resource_formats_url')
# err_msg = "Could not get file formats from " + str(format_european_url)
# if not format_european_url:
# log.warning("Could not find config setting: 'ckan.odsh.resource_formats_url', using fallback instead.")
# format_european_url = "http://publications.europa.eu/resource/authority/file-type"
# if sys.version_info[0] == 2:
# urlresponse = urllib2.urlopen(urllib2.Request(format_european_url))
# elif sys.version_info[0] == 3: # >=Python3.1
# urlresponse = urllib.request.urlopen(urllib.request.Request(format_european_url))
# g.parse(urlresponse)
# # At the moment, there are 143 different file types listed,
# # if less than 120 are found, something went wrong.
# if len(set([s for s in g.subjects()])) < 120:
# raise ValueError("Not enough subjects")
# # Save the content as backup
# if sys.version_info[0] == 2:
# urlresponse = urllib2.urlopen(urllib2.Request(format_european_url))
# elif sys.version_info[0] == 3: # >=Python3.1
# urlresponse = urllib.request.urlopen(urllib.request.Request(format_european_url))
# err_msg = "Could not write to " + fallback_filepath
# f = open(fallback_filepath, 'w')
# f.write(urlresponse.read())
# f.close()
fallback_filepath = config.get(
'ckan.odsh.resource_formats_fallback_filepath')
g.parse(fallback_filepath)
assert len(set([s for s in g.subjects()])) > 120
except:
......@@ -144,12 +142,14 @@ def resource_formats():
_RESOURCE_FORMATS_EXPORT[elem.split('/')[-1]] = elem
_RESOURCE_FORMATS_IMPORT[elem] = elem.split('/')[-1]
def resource_formats_export():
    """Return the mapping used when exporting resource formats.

    The module-level cache is populated lazily by resource_formats() on
    first access.
    """
    global _RESOURCE_FORMATS_EXPORT
    if _RESOURCE_FORMATS_EXPORT:
        return _RESOURCE_FORMATS_EXPORT
    resource_formats()
    return _RESOURCE_FORMATS_EXPORT
def resource_formats_import():
global _RESOURCE_FORMATS_IMPORT
if not _RESOURCE_FORMATS_IMPORT:
......@@ -159,6 +159,7 @@ def resource_formats_import():
_LANGUAGES = None
def get_language():
''' When datasets are exported in rdf-format, their language-tag
should be given as
......@@ -175,7 +176,8 @@ def get_language():
_LANGUAGES = {}
languages_file_path = config.get('ckanext.odsh.language.mapping')
if not languages_file_path:
log.warning("Could not find config setting: 'ckanext.odsh.language.mapping', using fallback instead.")
log.warning(
"Could not find config setting: 'ckanext.odsh.language.mapping', using fallback instead.")
languages_file_path = '/usr/lib/ckan/default/src/ckanext-odsh/languages.json'
with open(languages_file_path) as languages_file:
try:
......
......@@ -13,6 +13,8 @@
{{ super() }}
{% set matomo_url = h.odsh_tracking_url()%}
{% set matomo_id = h.odsh_tracking_id()%}
<meta data-name="type" content="{{h.odsh_is_slave()}}">
<!-- Matomo -->
<script type="text/javascript">
var _paq = _paq || [];
......
......@@ -22,7 +22,9 @@ class RequestHandler(BaseHTTPRequestHandler):
# GET
def do_GET(self):
self.send_response(requests.codes.ok)
self.send_header('Content-Type', 'application/json; charset=utf-8')
# self.send_header('Content-Type', 'application/json; charset=utf-8')
self.send_header(
'Content-Type', 'application/rdf+xml; charset=utf-8')
self.end_headers()
self.wfile.write(data.encode("utf-8"))
......@@ -46,8 +48,9 @@ class HarvestServerMock(threading.Thread):
self._stop_event = threading.Event()
self.thread_name = self.__class__
self.server = HTTPServer((hostName, hostPort), RequestHandler)
threading.Thread.__init__(self, name=self.thread_name, target=self.server.serve_forever)
# self.setDaemon(True)
threading.Thread.__init__(
self, name=self.thread_name, target=self.server.serve_forever)
self.setDaemon(True)
# def run(self):
......
......@@ -8,6 +8,7 @@ import os
import sys
import ConfigParser
from collections import OrderedDict
from urlparse import urlsplit
expected_commit = '8cd9576884cae6abe50a27c891434cb9fe87ced2'
......@@ -139,12 +140,15 @@ class TestEnv:
def test_plugins(self):
value = config.get('ckan.plugins', [])
for p in ['odsh', 'odsh_autocomplete']:
for p in ['odsh']:
assert p in value, 'missing plugin:' + p
if isMaster():
for p in ['odsh_icap', 'odsh_dcat_harvest', 'odsh_harvest']:
assert p in value, 'missing plugin:' + p
if isSlave():
for p in ['odsh_autocomplete']:
assert p in value, 'missing plugin:' + p
# pdb.set_trace()
......@@ -176,3 +180,92 @@ class TestEnv:
# # version = checkConfig('ckanext.odsh.version')
# assert version == expected_commit, "wrong version: {was}!={exp}".format(was=version, exp=expected_commit)
def test_routes(self):
    """Compare the proxy routes in the live Apache config with the expected
    slave-instance routing table and print a warning for any difference.

    Only runs on slave instances; on a master there is nothing to check.
    Fixes: 'expexted_rules' typo renamed, commented-out debug lines removed,
    indentation restored.
    """
    if isMaster():
        return
    # Expected routing table. Target hosts in this template differ from the
    # deployed config, which is why _parse_rules only compares
    # (directive, source, target-path) triples, not hosts.
    expected_rules = \
        """ ProxyPass /dataset/new http://10.61.47.219/dataset/new
        ProxyPassReverse /dataset/new http://10.61.47.219/dataset/new
        ProxyPassMatch ^/(dataset/delete/[^/]+)$ http://10.61.47.219/$1
        ProxyPassReverse ^/(dataset/delete/[^/]+)$ http://10.61.47.219/$1
        ProxyPassMatch ^/(dataset/edit/[^/]+)$ http://10.61.47.219/$1
        ProxyPassReverse ^/(dataset/edit/[^/]+)$ http://10.61.47.219/$1
        ProxyPassReverse /dataset http://141.91.184.90/dataset
        ProxyPassReverse /dataset http://141.91.184.90/dataset
        ProxyPass /dataset/new_resource http://10.61.47.219/dataset/new_resource
        ProxyPassReverse /dataset/new_resource http://141.91.184.90/dataset/new_resource
        ProxyPassReverse /dataset/new_resource http://141.91.184.90/dataset/new_resource
        #ProxyPass /api/i18n/de http://141.91.184.90/api/i18n/de
        ProxyPassReverse ^/uploads/group/(.*)$ http://10.61.47.219/uploads/group/$1
        ProxyPassMatch ^/uploads/group/(.*)$ http://10.61.47.219/uploads/group/$1
        ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+/download/[^/]+)$ http://141.91.184.90/$1
        ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/download/[^/]+)$ http://141.91.184.90/$1
        ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+)$ http://10.61.47.219/$1
        ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/)$ http://10.61.47.219/$1
        ProxyPassMatch ^/(dataset/[^/]+/resource_data/[^/]+)$ http://10.61.47.219/$1
        ProxyPassReverse ^/(dataset/[^/]+/resource_data/[^/]+)$ http://10.61.47.219/$1
        ProxyPassMatch ^/(dataset/[^/]+/resource_edit/[^/]+)$ http://10.61.47.219/$1
        ProxyPassReverse ^/(dataset/[^/]+/resource_edit/[^/]+)$ http://10.61.47.219/$1
        ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/new_view[^/]*)$ http://10.61.47.219/$1
        ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+/new_view[^/]*)$ http://10.61.47.219/$1
        ProxyPassMatch ^/(harvest.*)$ http://141.91.184.90/$1
        ProxyPassReverse /harvest http://141.91.184.90/harvest
        ProxyPass /harvest http://141.91.184.90/harvest
        ProxyPassReverse ^/(harvest.*)$ http://141.91.184.90/$1
        ProxyPassReverse ^/(api/3/action/package.*)$ http://10.61.47.219/$1
        ProxyPassMatch ^/(api/3/action/package.*)$ http://10.61.47.219/$1
        ProxyPass /api/action/package_create http://10.61.47.219/api/action/package_create
        ProxyPassReverse /api/action/package_create http://10.61.47.219/api/action/package_create
        ProxyPass /api/action/resource_create http://10.61.47.219/api/action/resource_create
        ProxyPassReverse /api/action/resource_create http://10.61.47.219/api/action/resource_create
        ProxyPassMatch ^/(organization/edit/[^/]+)$ http://10.61.47.219/$1
        ProxyPassReverse ^/(organization/edit/[^/]+)$ http://10.61.47.219/$1
        ProxyPass /organization/new http://<interne-IP-Master>/organization/new
        ProxyPassReverse /organization/new http://<interne-IP-Master>/organization/new
        ProxyPassReverse /organization http://<interne-IP-Master>/organization
        ProxyPassReverse ^/(organization/edit/[^/]+)$ http://<interne-IP-Master>/$1
        # ProxyPass /datarequest http://10.61.47.219/datarequest
        # ProxyPassReverse /datarequest http://10.61.47.219/datarequest
        """
    expected = self._parse_rules(expected_rules.splitlines())
    with open('/etc/apache2/sites-enabled/ckan_default.conf', 'r') as aconfig:
        lines = aconfig.readlines()
    current = self._parse_rules(lines, check_host=True)
    # Report both directions of the difference, but only warn -- the test
    # does not fail on a mismatch.
    if len(expected.symmetric_difference(current)) > 0:
        diff = expected.difference(current)
        if len(diff) > 0:
            print('WARNING: missing routes:')
            for r in sorted(diff, key=lambda tup: tup[1]):
                print('{cmd} {source} {target}'.format(
                    cmd=r[0], source=r[1], target='http://<interne-IP-Master>'+r[2]))
        diff = current.difference(expected)
        if len(diff) > 0:
            print('WARNING: found unexpected routes:')
            for r in sorted(diff, key=lambda tup: tup[1]):
                print('{cmd} {source} {target}'.format(
                    cmd=r[0], source=r[1], target='<target>'+r[2]))
def _parse_rules(self, lines, check_host=False):
rules = set(['ProxyPassMatch', 'ProxyPassReverse', 'ProxyPass'])
ret = []
hosts = set()
for line in lines:
tokens = filter(lambda t: t.strip(), line.strip().split(' '))
if not tokens or tokens[0] not in rules:
continue
assert len(tokens) == 3
# for token in tokens:
# print(token)
f = urlsplit(tokens[2])
ret.append((tokens[0], tokens[1], f.path))
hosts.add(f.netloc)
if check_host and len(hosts) > 1:
print('WARNING: found multiple target hosts: {hosts}'.format(
hosts=', '.join(hosts)))
return set(ret)
from ckanext.odsh.tests.test_helpers import AppProxy
import ckanext.odsh.tests.test_helpers as testhelpers
import ckan.tests.factories as factories
import uuid
import pdb
......@@ -9,18 +10,19 @@ import subprocess
class TestHarvest:
def _create_harvester(self):
def _create_harvester(self, source_type):
guid = str(uuid.uuid4())
self.org = factories.Organization(
name="test_harvest_org_" + guid,
users=[{'name': 'ckanuser', 'capacity': 'admin'}]
)
# self.org = factories.Organization(
# name="test_harvest_org_" + guid,
# users=[{'name': 'ckanuser', 'capacity': 'admin'}]
# )
self._get_app().login()
response = self.app.get('/harvest/new')
form = response.forms[0]
title = 'harvest_test_source_' + guid
form['title'] = title
form['url'] = "http://localhost:5002/" + guid
form['source_type'] = source_type
final_response = self.app.submit_form(form)
# submit_response = self.app.submit_form(form)
# assert 'missing value' in submit_response
......@@ -32,7 +34,7 @@ class TestHarvest:
def test_harvest_dcat(self):
# Arrange
harvester = self._create_harvester()
harvester = self._create_harvester('dcat_rdf')
harvest_sever_mock.data = self._load_rdf_catalog()
server = HarvestServerMock()
server.start()
......@@ -41,7 +43,7 @@ class TestHarvest:
def run_harvest(self, harvester):
out = subprocess.check_output([
"paster", "--plugin=ckanext-harvest", "harvester", "run_test", harvester, "--config=/etc/ckan/default/development.ini"])
"paster", "--plugin=ckanext-harvest", "harvester", "run_test", harvester, '--config='+testhelpers.getConfigPath()])
def _get_app(self):
if not hasattr(self, 'app'):
......@@ -50,7 +52,6 @@ class TestHarvest:
return self.app
def _load_rdf_catalog(self):
# with open('ckanext/odsh/tests/rdf_catalog.xml', 'r') as rdffile:
with open('ckanext/odsh/tests/rdf_catalog_empty.xml', 'r') as rdffile:
with open('ckanext/odsh/tests/rdf_catalog.xml', 'r') as rdffile:
data = rdffile.read()
return data
......@@ -4,6 +4,7 @@ import functools
from ckan.common import config
import ckan.config.middleware
import ckan.tests.helpers as helpers
import sys
def odsh_test():
......@@ -38,13 +39,27 @@ def _get_test_app():
return app
def getConfigPath():
    """Return the ini path passed on the command line via --with-pylons.

    Scans sys.argv for an argument of the form ``--with-pylons=<path>`` and
    returns the path part of the first match. Fails with an AssertionError
    when the flag is absent or its value is empty.
    """
    candidates = (
        arg.split('=')[1] for arg in sys.argv if arg.startswith('--with-pylons')
    )
    path = next(candidates, None)
    assert path, 'could not find config parameter'
    return path
class AppProxy:
def login(self):
app = _get_test_app()
response = app.get('/user/login')
login_form = response.forms[0]
login_form['login'] = 'ckanuser'
login_form['password'] = 'pass'
user = config.get('ckanext.odsh.testuser', None)
assert user
password = config.get('ckanext.odsh.testuserpass', None)
assert password
login_form['login'] = user
login_form['password'] = password
submit_response = login_form.submit('save')
final_response = helpers.webtest_maybe_follow(submit_response)
self.app = app
......
......@@ -9,6 +9,8 @@ import urllib2
import ckan.tests.helpers as helpers
from ckan.common import config
import ckan.config.middleware
from ckanext.dcatde.profiles import DCATDE, DCAT, DCATDE_1_0
import pdb
# run with nosetests --ckan --nologcapture --with-pylons=<config to test> ckanext/odsh/tests/test_routes.py
......@@ -17,7 +19,6 @@ DCAT = Namespace("http://www.w3.org/ns/dcat#")
DCT = Namespace("http://purl.org/dc/terms/")
def _get_test_app():
app = ckan.config.middleware.make_app(config['global_conf'], **config)
app = helpers.CKANTestApp(app)
......@@ -44,9 +45,10 @@ class TestRDFExport:
issued='27-01-2000',
extras=extras,
owner_org='test',
license_id="http://dcat-ap.de/def/licenses/dl-by-de/2.0")
license_id="http://dcat-ap.de/def/licenses/dl-by-de/2.0",
licenseAttributionByText='foo')
factories.Resource(
package_id=dataset['id'], license=dataset['license_id'])
package_id=dataset['id'], license=dataset['license_id'], licenseAttributionByText='foo')
factories.Resource(
package_id=dataset['id'])
......@@ -54,9 +56,25 @@ class TestRDFExport:
response = self._get_app().get('/dataset/'+dataset['name']+'.rdf')
g.parse(data=response.body)
lic = self._extract_licenses(g)
att = self._extract_licenseAttributions(g)
assert len(lic) == 3
assert len(att) == 3
assert len(set([str(l) for l in lic])) == 1
assert len(set([str(a) for a in att])) == 1
assert str(att[0]) == 'text'
def test_catalog(self):
g = rdflib.Graph()
response = self._get_app().get('/catalog.xml')
g.parse(data=response.body)
datasets = list(g.subjects(RDF.type, DCAT.Dataset))
response = self._get_app().get('/api/3/action/package_search')
plist = json.loads(response.body)
assert len(datasets) == plist['result']['count'], "{rdf} != {cat}".format(
rdf=len(datasets), cat=plist['result']['count'])
def _get_app(self):
if not hasattr(self, 'app'):
......@@ -78,3 +96,20 @@ class TestRDFExport:
ret.append(l)
return ret
def _extract_licenseAttributions(self, g):
    """Collect every licenseAttributionByText value from an RDF graph.

    Looks on the single dcat:Dataset node and on each of its
    dcat:distribution nodes, checking both the DCATDE and the DCATDE_1_0
    namespaces. Asserts that the graph contains exactly one dataset.
    """
    subjects = list(g.subjects(RDF.type, DCAT.Dataset))
    assert len(subjects) == 1
    ds = subjects[0]
    namespaces = [DCATDE, DCATDE_1_0]
    values = []
    # Attribution attached directly to the dataset node.
    for ns in namespaces:
        values.extend(g.objects(ds, ns.licenseAttributionByText))
    # Attribution attached to each distribution of the dataset.
    for dist in g.objects(ds, DCAT.distribution):
        for ns in namespaces:
            values.extend(g.objects(dist, ns.licenseAttributionByText))
    return values
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment