diff --git a/ckan_default.conf b/ckan_default.conf
new file mode 100644
index 0000000000000000000000000000000000000000..891eaf22b4291dc46b74b6b2ba988b5e9703a65e
--- /dev/null
+++ b/ckan_default.conf
@@ -0,0 +1,167 @@
+<VirtualHost *:80>
+    WSGIScriptAlias / /etc/ckan/default/apache.wsgi
+
+    # Pass authorization info on (needed for rest api).
+    WSGIPassAuthorization On
+
+    # Deploy as a daemon (avoids conflicts between CKAN instances).
+    WSGIDaemonProcess ckan_default display-name=ckan_default processes=2 threads=15
+
+    WSGIProcessGroup ckan_default
+    RewriteEngine On
+
+    #resources
+    RewriteCond %{REQUEST_URI} !^/.*\.css
+    RewriteCond %{REQUEST_URI} !^/base/images/
+    RewriteCond %{REQUEST_URI} !^/base/fonts/
+    RewriteCond %{REQUEST_URI} !^/fanstatic/
+    RewriteCond %{REQUEST_URI} !^/scripts/vendor/
+    RewriteCond %{REQUEST_URI} !^/uploads/group/
+    #api
+    RewriteCond %{REQUEST_URI} !^/api/i18n/de$
+    RewriteCond %{REQUEST_URI} !^/api/2/.*$
+    RewriteCond %{REQUEST_URI} !^/api/3/action/package.*$
+    RewriteCond %{REQUEST_URI} !^/catalog.xml[^/]*$
+    #user       
+    RewriteCond %{REQUEST_URI} !^/user/login$
+    RewriteCond %{REQUEST_URI} !^/user/logged_out_redirect$
+    RewriteCond %{REQUEST_URI} !^/user/reset$
+    RewriteCond %{REQUEST_URI} !^/user/edit$
+    RewriteCond %{REQUEST_URI} !^/user/register$
+    RewriteCond %{REQUEST_URI} !^/user/\w+$
+    RewriteCond %{REQUEST_URI} !^/user/$
+    RewriteCond %{REQUEST_URI} !^/login_generic?
+    RewriteCond %{REQUEST_URI} !^/logged_in?
+    #report
+    RewriteCond %{REQUEST_URI} !^/report$
+    RewriteCond %{REQUEST_URI} !^/report/openness$
+    RewriteCond %{REQUEST_URI} !^/report/openness/[^/]*$
+    #organization
+    RewriteCond %{REQUEST_URI} !^/organization$
+    RewriteCond %{REQUEST_URI} !^/organization/new$
+    RewriteCond %{REQUEST_URI} !^/organization?__no_cache__=True$
+    RewriteCond %{REQUEST_URI} !^/organization/[^/]*$
+    RewriteCond %{REQUEST_URI} !^/organization/edit/[^/]*$
+    RewriteCond %{REQUEST_URI} !^/organization/delete/[^/]*$
+    RewriteCond %{REQUEST_URI} !^/organization/members/[^/]*$
+    RewriteCond %{REQUEST_URI} !^/organization/member_new/[^/]*$
+    RewriteCond %{REQUEST_URI} !^/organization/member_delete/[^/]*$
+    #dataset
+    RewriteCond %{REQUEST_URI} !^/dataset$
+    RewriteCond %{REQUEST_URI} !^/dataset/[^/]*$
+    RewriteCond %{REQUEST_URI} !^/dataset/new_resource/[^/]*$
+    RewriteCond %{REQUEST_URI} !^/dataset/edit/[^/]*$
+    RewriteCond %{REQUEST_URI} !^/dataset/[^/]+/resource/[^/]+$
+    RewriteCond %{REQUEST_URI} !^/dataset/[^/]+/resource_edit/[^/]*$
+    RewriteCond %{REQUEST_URI} !^/dataset/[^/]+/resource_data/[^/]+$
+    RewriteCond %{REQUEST_URI} !^/dataset/[^/]+/resource_delete/[^/]*$
+    RewriteCond %{REQUEST_URI} !^/dataset/[^/]+/resource/[^/]+/download/[^/]+$
+    RewriteCond %{REQUEST_URI} !^/dataset/[^/]+/resource/[^/]+/edit_view/[^/]+$
+    RewriteCond %{REQUEST_URI} !^/dataset/delete/[^/]+$
+    #tag
+    RewriteCond %{REQUEST_URI} !^/tag/[^/]*$
+    #harvest
+    RewriteCond %{REQUEST_URI} !^/harvest.*$
+    #feed
+    RewriteCond %{REQUEST_URI} !^/feeds/custom.atom[^/]*$
+    #other
+    RewriteCond %{REQUEST_URI} !^/$
+    RewriteCond %{REQUEST_URI} !^/info_page$
+    #
+    RewriteCond %{REQUEST_URI} !^/notfound$
+    #block if no match
+    RewriteRule (.*) /notfound [P,L,NE]
+
+    RewriteCond %{REQUEST_URI} ^/user/dashboard [OR]
+    RewriteCond %{REQUEST_URI} ^/user/me
+    RewriteRule (.*) /notfound [P,L,NE]
+
+
+    ErrorLog /var/log/apache2/ckan_default.error.log
+    CustomLog /var/log/apache2/ckan_default.custom.log combined
+    LogLevel alert rewrite:trace3 alias:debug
+
+    <IfModule mod_rpaf.c>
+        RPAFenable On
+        RPAFsethostname On
+        RPAFproxy_ips 127.0.0.1
+    </IfModule>
+
+    <Directory />
+        Require all granted
+    </Directory>
+
+    # ProxyPreserveHost On
+
+    # ProxyPass /dataset/new http://10.61.47.219/dataset/new
+    # ProxyPassReverse /dataset/new http://10.61.47.219/dataset/new
+    # ProxyPassMatch ^/(dataset/delete/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassReverse ^/(dataset/delete/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassMatch ^/(dataset/edit/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassReverse ^/(dataset/edit/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassReverse /dataset http://141.91.184.90/dataset
+    # ProxyPassReverse /dataset http://141.91.184.90/dataset
+    # ProxyPass /solr http://localhost:8983/solr
+    # ProxyPassReverse /solr http://localhost:8983/solr
+    # ProxyPass /dataset/new_resource http://10.61.47.219/dataset/new_resource
+    # ProxyPassReverse /dataset/new_resource http://141.91.184.90/dataset/new_resource
+    # ProxyPassReverse /dataset/new_resource http://141.91.184.90/dataset/new_resource
+    # #ProxyPass /api/i18n/de http://141.91.184.90/api/i18n/de
+    # ProxyPassReverse ^/uploads/group/(.*)$ http://10.61.47.219/uploads/group/$1
+    # ProxyPassMatch ^/uploads/group/(.*)$ http://10.61.47.219/uploads/group/$1
+    # ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+/download/[^/]+)$ http://141.91.184.90/$1
+    # ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/download/[^/]+)$ http://141.91.184.90/$1
+    # ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/)$ http://10.61.47.219/$1
+    # ProxyPassMatch ^/(dataset/[^/]+/resource_data/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassReverse ^/(dataset/[^/]+/resource_data/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/new_view[^/]*)$ http://10.61.47.219/$1
+    # ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+/new_view[^/]*)$ http://10.61.47.219/$1
+    # ProxyPassMatch ^/(harvest.*)$ http://141.91.184.90/$1
+    # ProxyPassReverse /harvest http://141.91.184.90/harvest
+    # ProxyPass /harvest http://141.91.184.90/harvest
+    # ProxyPassReverse ^/(harvest.*)$ http://141.91.184.90/$1
+    # ProxyPassReverse /harvest/admin http://141.91.184.90/harvest/admin
+    # ProxyPassReverse ^/(api/3/action/package.*)$ http://10.61.47.219/$1
+    # ProxyPassMatch ^/(api/3/action/package.*)$ http://10.61.47.219/$1
+    # ProxyPass /api/action/package_create http://10.61.47.219/api/action/package_create
+    # ProxyPassReverse /api/action/package_create http://10.61.47.219/api/action/package_create
+    # ProxyPass /api/action/resource_create http://10.61.47.219/api/action/resource_create
+    # ProxyPassReverse /api/action/resource_create http://10.61.47.219/api/action/resource_create
+    # ProxyPassMatch ^/(organization/edit/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassReverse ^/(organization/edit/[^/]+)$ http://10.61.47.219/$1 
+    # ProxyPassReverse /organization http://141.91.184.90/organization
+    # ProxyPassMatch ^/(organization/delete/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPass /datarequest http://10.61.47.219/datarequest
+    # ProxyPassReverse /datarequest http://10.61.47.219/datarequest
+
+    ProxyPass /dataset/new http://<master-ip>/dataset/new
+    ProxyPassReverse /dataset/new http://<master-ip>/dataset/new
+    ProxyPassMatch ^/(dataset/delete/[^/]+)$ http://<master-ip>/$1
+    ProxyPassReverse ^/(dataset/delete/[^/]+)$ http://<master-ip>/$1
+    ProxyPassMatch ^/(dataset/edit/[^/]+)$ http://<master-ip>/$1
+    ProxyPassReverse ^/(dataset/edit/[^/]+)$ http://<master-ip>/$1
+    ProxyPassReverse /dataset http://<master-ip>/dataset
+    ProxyPassReverse /dataset http://<master-ip>/dataset
+    ProxyPass /dataset/new_resource http://<master-ip>/dataset/new_resource
+    ProxyPassReverse /dataset/new_resource http://<master-ip>/dataset/new_resource
+    ProxyPassReverse /dataset/new_resource http://<master-ip>/dataset/new_resource
+    ProxyPassReverse ^/uploads/group/(.*)$ http://<master-ip>/uploads/group/$1
+    ProxyPassMatch ^/uploads/group/(.*)$ http://<master-ip>/uploads/group/$1
+    ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+/download/[^/]+)$ http://<master-ip>/$1
+    ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/download/[^/]+)$ http://<master-ip>/$1
+    ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+)$ http://<master-ip>/$1
+    ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/)$ http://<master-ip>/$1
+    ProxyPassMatch ^/(dataset/[^/]+/resource_data/[^/]+)$ http://<master-ip>/$1
+    ProxyPassReverse ^/(dataset/[^/]+/resource_data/[^/]+)$ http://<master-ip>/$1
+    ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/new_view[^/]*)$ http://<master-ip>/$1
+    ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+/new_view[^/]*)$ http://<master-ip>/$1
+    ProxyPassReverse ^/(harvest.*)$ http://<master-ip>/$1
+    ProxyPassMatch ^/(harvest.*)$ http://<master-ip>/$1
+    ProxyPassReverse ^/(api/3/action/package.*)$ http://<master-ip>/$1
+    ProxyPassMatch ^/(api/3/action/package.*)$ http://<master-ip>/$1
+    ProxyPassMatch ^/(organization/edit/[^/]+)$ http://<master-ip>/$1
+    ProxyPassReverse ^/(organization/edit/[^/]+)$ http://<master-ip>/$1
+    ProxyPass /organization/new http://<interne-IP-Master>/organization/new
+    ProxyPassReverse /organization/new http://<interne-IP-Master>/organization/new
+</VirtualHost>
\ No newline at end of file
diff --git a/ckanext/odsh/controller.py b/ckanext/odsh/controller.py
index 20f9eed2008de193b63862d1ac48475d453a89d7..77076573f7570ef28089d1519ecb61121cdd964f 100644
--- a/ckanext/odsh/controller.py
+++ b/ckanext/odsh/controller.py
@@ -145,6 +145,12 @@ class OdshApiController(ApiController):
     def action(self, logic_function, ver=None):
         if logic_function == 'resource_qv4yAI2rgotamXGk98gJ':
             return helpers.odsh_get_version_id()
+        if logic_function == 'resourcelog_qv4yAI2rgotamXGk98gJ':
+            if config.get('ckanext.odsh.enabletestendpoints', None) == 'True':
+                log.info('This is an info test log')
+                log.warning('This is a warning test log')
+                log.error('This is an error test log')
+                return 'ok'
         try:
             function = logic.get_action(logic_function)
             side_effect_free = getattr(function, 'side_effect_free', False)
diff --git a/ckanext/odsh/helpers.py b/ckanext/odsh/helpers.py
index 8b43c7c00897485f8c98b50c5c1cc035d24f027e..1fe0abf4fb514c785b90e1f1461659691d15d463 100644
--- a/ckanext/odsh/helpers.py
+++ b/ckanext/odsh/helpers.py
@@ -260,3 +260,9 @@ def odsh_get_version_id():
 
 def odsh_show_testbanner():
     return config.get('ckanext.odsh.showtestbanner', 'False') == 'True'
+
+def odsh_is_slave():
+    c = config.get('ckanext.odsh.slave', None)
+    if c is None or (c != 'True' and c != 'False'):
+        return -1 
+    return 1 if c == 'True' else 0
diff --git a/ckanext/odsh/plugin.py b/ckanext/odsh/plugin.py
index 6a2cea7e3ea02ce248a24853b516dc232f563c06..b8f211afbfa74dfb885195c9d6022e1d6bec1b1c 100644
--- a/ckanext/odsh/plugin.py
+++ b/ckanext/odsh/plugin.py
@@ -293,7 +293,8 @@ class OdshPlugin(plugins.SingletonPlugin, DefaultTranslation, DefaultDatasetForm
                 'odsh_spatial_extends_available': odsh_helpers.spatial_extends_available,
                 'odsh_public_resource_url': odsh_helpers.odsh_public_resource_url,
                 'odsh_get_version_id': odsh_helpers.odsh_get_version_id,
-                'odsh_show_testbanner': odsh_helpers.odsh_show_testbanner
+                'odsh_show_testbanner': odsh_helpers.odsh_show_testbanner,
+                'odsh_is_slave': odsh_helpers.odsh_is_slave
                 }
 
     def after_map(self, map):
diff --git a/ckanext/odsh/profiles.py b/ckanext/odsh/profiles.py
index 6787dfde1cb32288ddaa24a322c890a7da09a4ba..d9532758d517c39d9a995a3cad70b24df7bf2390 100644
--- a/ckanext/odsh/profiles.py
+++ b/ckanext/odsh/profiles.py
@@ -1,4 +1,4 @@
-from ckanext.dcatde.profiles import DCATdeProfile, DCATDE, DCAT, VCARD, dcat_theme_prefix , DCATDE_1_0
+from ckanext.dcatde.profiles import DCATdeProfile, DCATDE, DCAT, VCARD, dcat_theme_prefix, DCATDE_1_0
 from ckanext.dcat.utils import resource_uri
 from ckanext.dcat.profiles import EuropeanDCATAPProfile, DCT, URIRefOrLiteral
 from ckan.model.license import LicenseRegister
@@ -20,6 +20,7 @@ log = logging.getLogger(__name__)
 DCT = rdflib.namespace.Namespace("http://purl.org/dc/terms/")
 DCAT = rdflib.namespace.Namespace("http://www.w3.org/ns/dcat#")
 
+
 class ODSHEuropeanDCATAPProfile(EuropeanDCATAPProfile):
 
     def _license(self, dataset_ref):
@@ -29,7 +30,7 @@ class ODSHEuropeanDCATAPProfile(EuropeanDCATAPProfile):
             license_uri2id = {}
             license_title2id = {}
             for license_id, license in LicenseRegister().items():
-                license_uri2id[license_id] = license_id 
+                license_uri2id[license_id] = license_id
                 license_uri2id[license.url] = license_id
                 license_title2id[license.title] = license_id
             self._licenceregister_cache = license_uri2id, license_title2id
@@ -48,125 +49,125 @@ class ODSHEuropeanDCATAPProfile(EuropeanDCATAPProfile):
         return ''
 
     def _distribution_format(self, distribution, normalize_ckan_format=True):
-        imt, label = super(ODSHEuropeanDCATAPProfile,self)._distribution_format(distribution, normalize_ckan_format)            
+        imt, label = super(ODSHEuropeanDCATAPProfile, self)._distribution_format(
+            distribution, normalize_ckan_format)
         if label in resource_formats_import():
             label = resource_formats_import()[label]
         return imt, label
-        
+
     def graph_from_dataset(self, dataset_dict, dataset_ref):
-        super(ODSHEuropeanDCATAPProfile,self).graph_from_dataset(dataset_dict, dataset_ref)
-        for s,p,o in self.g.triples((None, rdflib.RDF.type, DCAT.Distribution)):
+        super(ODSHEuropeanDCATAPProfile, self).graph_from_dataset(
+            dataset_dict, dataset_ref)
+        for s, p, o in self.g.triples((None, rdflib.RDF.type, DCAT.Distribution)):
             for s2, p2, o2 in self.g.triples((s, DCT['format'], None)):
                 if o2.decode() in resource_formats_export():
-                    self.g.set((s, DCT['format'], rdflib.URIRef(resource_formats_export()[o2.decode()])))
-        for s,p,o in self.g.triples((None, DCT.language, None)):
+                    self.g.set((s, DCT['format'], rdflib.URIRef(
+                        resource_formats_export()[o2.decode()])))
+        for s, p, o in self.g.triples((None, DCT.language, None)):
             if o.decode() in get_language():
-                 self.g.set((s, p, rdflib.URIRef(get_language()[o.decode()])))                 
+                self.g.set((s, p, rdflib.URIRef(get_language()[o.decode()])))
             elif type(o) == rdflib.Literal and type(URIRefOrLiteral(o.decode())) == rdflib.URIRef:
-                self.g.set((s, p, rdflib.URIRef(o.decode()) ))
+                self.g.set((s, p, rdflib.URIRef(o.decode())))
 
         license = dataset_dict.get('license_id', None)
         if license:
             self.g.add((dataset_ref, DCT.license, rdflib.URIRef(license)))
             for dist in self.g.objects(dataset_ref, DCAT.distribution):
                 self.g.add((dist, DCT.license, rdflib.URIRef(license)))
-        
 
 class ODSHDCATdeProfile(DCATdeProfile):
     def parse_dataset(self, dataset_dict, dataset_ref):
-        dataset_dict = super(ODSHDCATdeProfile,self).parse_dataset(dataset_dict, dataset_ref)
+        dataset_dict = super(ODSHDCATdeProfile, self).parse_dataset(
+            dataset_dict, dataset_ref)
         # Enhance Distributions
         for distribution in self.g.objects(dataset_ref, DCAT.distribution):
             for resource_dict in dataset_dict.get('resources', []):
                 # Match distribution in graph and distribution in ckan-dict
                 if unicode(distribution) == resource_uri(resource_dict):
                     for namespace in [DCATDE, DCATDE_1_0]:
-                        value = self._object_value(distribution, namespace.licenseAttributionByText)
+                        value = self._object_value(
+                            distribution, namespace.licenseAttributionByText)
                         if value:
-                            ds_utils.insert_new_extras_field(dataset_dict, 'licenseAttributionByText', value)
+                            ds_utils.insert_new_extras_field(
+                                dataset_dict, 'licenseAttributionByText', value)
                             return dataset_dict
         return dataset_dict
 
     def graph_from_dataset(self, dataset_dict, dataset_ref):
-        super(ODSHDCATdeProfile,self).graph_from_dataset(dataset_dict, dataset_ref)
+        super(ODSHDCATdeProfile, self).graph_from_dataset(
+            dataset_dict, dataset_ref)
         # Enhance Distributions
         # <dcatde:contributorID rdf:resource="http://dcat-ap.de/def/contributors/schleswigHolstein"/>
-        self.g.add((dataset_ref, DCATDE.contributorID, rdflib.URIRef("http://dcat-ap.de/def/contributors/schleswigHolstein")))
+        self.g.add((dataset_ref, DCATDE.contributorID, rdflib.URIRef(
+            "http://dcat-ap.de/def/contributors/schleswigHolstein")))
+
+        extras = dataset_dict.get('extras', None)
+        if extras:
+            attr = None
+            for d in extras:
+                if d['key'] == 'licenseAttributionByText':
+                    attr = d['value']
+                    break
+            if attr:
+                self.g.set(
+                    (dataset_ref, DCATDE.licenseAttributionByText, rdflib.Literal(attr)))
+                for dist in self.g.objects(dataset_ref, DCAT.distribution):
+                    self.g.set(
+                        (dist, DCATDE.licenseAttributionByText, rdflib.Literal(attr)))
+
 
-        
 _RESOURCE_FORMATS_IMPORT = None
 _RESOURCE_FORMATS_EXPORT = None
 
+
 def resource_formats():
     global _RESOURCE_FORMATS_IMPORT
     global _RESOURCE_FORMATS_EXPORT
     _RESOURCE_FORMATS_IMPORT = {}
     _RESOURCE_FORMATS_EXPORT = {}
     g = rdflib.Graph()
-        # Something went wrong with trying to get the file formats online, try to use backup instead
+    # Something went wrong with trying to get the file formats online, try to use backup instead
     try:
-        fallback_filepath = config.get('ckan.odsh.resource_formats_fallback_filepath')
-        # if not fallback_filepath:
-        #     log.warning("Could not find config setting: 'ckan.odsh.resource_formats_fallback_filepath', using fallback instead.")
-        #     fallback_filepath = "/tmp/fileformats.rdf"
-        # format_european_url = config.get('ckan.odsh.resource_formats_url')
-        # err_msg = "Could not get file formats from " + str(format_european_url)
-        # if not format_european_url:
-        #     log.warning("Could not find config setting: 'ckan.odsh.resource_formats_url', using fallback instead.")
-        #     format_european_url = "http://publications.europa.eu/resource/authority/file-type"
-        # if sys.version_info[0] == 2:
-        #     urlresponse = urllib2.urlopen(urllib2.Request(format_european_url))
-        # elif sys.version_info[0] == 3:  # >=Python3.1
-        #     urlresponse = urllib.request.urlopen(urllib.request.Request(format_european_url))
-        # g.parse(urlresponse)
-        # # At the moment, there are 143 different file types listed, 
-        # # if less than 120 are found, something went wrong.       
-        # if len(set([s for s in g.subjects()])) < 120:
-        #     raise ValueError("Not enough subjects")
-        # # Save the content as backup
-        # if sys.version_info[0] == 2:
-        #     urlresponse = urllib2.urlopen(urllib2.Request(format_european_url))
-        # elif sys.version_info[0] == 3:  # >=Python3.1
-        #     urlresponse = urllib.request.urlopen(urllib.request.Request(format_european_url))
-        # err_msg = "Could not write to " + fallback_filepath
-        # f = open(fallback_filepath, 'w')
-        # f.write(urlresponse.read())
-        # f.close()
+        fallback_filepath = config.get(
+            'ckan.odsh.resource_formats_fallback_filepath')
         g.parse(fallback_filepath)
         assert len(set([s for s in g.subjects()])) > 120
     except:
         log.exception("failed to process resource_formats")
         raise Exception('failed to load formats')
     file_types = [subj.decode() for subj in g.subjects()]
-    
+
     for elem in sorted(set(file_types)):
         if elem.split('/')[-1] != 'file-type':
             _RESOURCE_FORMATS_EXPORT[elem.split('/')[-1]] = elem
             _RESOURCE_FORMATS_IMPORT[elem] = elem.split('/')[-1]
 
+
 def resource_formats_export():
     global _RESOURCE_FORMATS_EXPORT
     if not _RESOURCE_FORMATS_EXPORT:
         resource_formats()
     return _RESOURCE_FORMATS_EXPORT
-    
+
+
 def resource_formats_import():
     global _RESOURCE_FORMATS_IMPORT
     if not _RESOURCE_FORMATS_IMPORT:
         resource_formats()
     return _RESOURCE_FORMATS_IMPORT
 
-    
+
 _LANGUAGES = None
 
+
 def get_language():
-    ''' When datasets are exported in rdf-format, their language-tag 
+    ''' When datasets are exported in rdf-format, their language-tag
     should be given as
     "<dct:language rdf:resource="http://publications.europa.eu/.../XXX"/>",
     where XXX represents the language conforming to iso-639-3 standard.
     However, some imported datasets represent their language as
-    "<dct:language>de</dct:language>", which will be interpreted here as 
-    iso-639-1 values. As we do not display the language setting in the 
+    "<dct:language>de</dct:language>", which will be interpreted here as
+    iso-639-1 values. As we do not display the language setting in the
     web frontend, this function only assures the correct export format,
     by using 'languages.json' as mapping table.
     '''
@@ -175,7 +176,8 @@ def get_language():
         _LANGUAGES = {}
         languages_file_path = config.get('ckanext.odsh.language.mapping')
         if not languages_file_path:
-            log.warning("Could not find config setting: 'ckanext.odsh.language.mapping', using fallback instead.")
+            log.warning(
+                "Could not find config setting: 'ckanext.odsh.language.mapping', using fallback instead.")
             languages_file_path = '/usr/lib/ckan/default/src/ckanext-odsh/languages.json'
         with open(languages_file_path) as languages_file:
             try:
@@ -188,4 +190,4 @@ def get_language():
             for language_line in language_mapping_table:
                 _LANGUAGES[language_line[0]] = language_line[1]
 
-    return _LANGUAGES
\ No newline at end of file
+    return _LANGUAGES
diff --git a/ckanext/odsh/templates/base.html b/ckanext/odsh/templates/base.html
index a5e280e6fce3f0659f2b7db79ea4fd6b1bc1b0b3..debf3e341031fb6cf5bdfd328b4accc3fa23eb20 100644
--- a/ckanext/odsh/templates/base.html
+++ b/ckanext/odsh/templates/base.html
@@ -13,6 +13,8 @@
 {{ super() }}
 {% set matomo_url = h.odsh_tracking_url()%}
 {% set matomo_id = h.odsh_tracking_id()%}
+<meta data-name="type" content="{{h.odsh_is_slave()}}">
+
 <!-- Matomo -->
 <script type="text/javascript">
   var _paq = _paq || [];
diff --git a/ckanext/odsh/tests/harvest_sever_mock.py b/ckanext/odsh/tests/harvest_sever_mock.py
index 2828ed849eb7fc82cec8dc06bb2bc214352163f1..cc20bdc6f2aed429c9aee37a8a2b1eaeee843016 100644
--- a/ckanext/odsh/tests/harvest_sever_mock.py
+++ b/ckanext/odsh/tests/harvest_sever_mock.py
@@ -22,7 +22,9 @@ class RequestHandler(BaseHTTPRequestHandler):
     # GET
     def do_GET(self):
         self.send_response(requests.codes.ok)
-        self.send_header('Content-Type', 'application/json; charset=utf-8')
+        # self.send_header('Content-Type', 'application/json; charset=utf-8')
+        self.send_header(
+            'Content-Type', 'application/rdf+xml; charset=utf-8')
         self.end_headers()
         self.wfile.write(data.encode("utf-8"))
 
@@ -46,8 +48,9 @@ class HarvestServerMock(threading.Thread):
         self._stop_event = threading.Event()
         self.thread_name = self.__class__
         self.server = HTTPServer((hostName, hostPort), RequestHandler)
-        threading.Thread.__init__(self, name=self.thread_name, target=self.server.serve_forever)
-        # self.setDaemon(True)
+        threading.Thread.__init__(
+            self, name=self.thread_name, target=self.server.serve_forever)
+        self.daemon = True
 
 
 #     def run(self):
diff --git a/ckanext/odsh/tests/test_env.py b/ckanext/odsh/tests/test_env.py
index 4c7ca1c189568e2c085219eca18e55904578ea87..12f749d858f9aa3245cf8ad16f03e4919657d599 100644
--- a/ckanext/odsh/tests/test_env.py
+++ b/ckanext/odsh/tests/test_env.py
@@ -8,6 +8,7 @@ import os
 import sys
 import ConfigParser
 from collections import OrderedDict
+from urlparse import urlsplit
 
 expected_commit = '8cd9576884cae6abe50a27c891434cb9fe87ced2'
 
@@ -139,12 +140,15 @@ class TestEnv:
 
     def test_plugins(self):
         value = config.get('ckan.plugins', [])
-        for p in ['odsh', 'odsh_autocomplete']:
+        for p in ['odsh']:
             assert p in value, 'missing plugin:' + p
 
         if isMaster():
             for p in ['odsh_icap', 'odsh_dcat_harvest', 'odsh_harvest']:
                 assert p in value, 'missing plugin:' + p
+        if isSlave():
+            for p in ['odsh_autocomplete']:
+                assert p in value, 'missing plugin:' + p
 
         # pdb.set_trace()
 
@@ -176,3 +180,92 @@ class TestEnv:
     #     # version = checkConfig('ckanext.odsh.version')
     #     assert version == expected_commit, "wrong version: {was}!={exp}".format(was=version, exp=expected_commit)
 
+    def test_routes(self):
+        if isMaster():
+            return
+
+        expexted_rules = \
+            """ ProxyPass /dataset/new http://10.61.47.219/dataset/new
+    ProxyPassReverse /dataset/new http://10.61.47.219/dataset/new
+    ProxyPassMatch ^/(dataset/delete/[^/]+)$ http://10.61.47.219/$1
+    ProxyPassReverse ^/(dataset/delete/[^/]+)$ http://10.61.47.219/$1
+    ProxyPassMatch ^/(dataset/edit/[^/]+)$ http://10.61.47.219/$1
+    ProxyPassReverse ^/(dataset/edit/[^/]+)$ http://10.61.47.219/$1
+    ProxyPassReverse /dataset http://141.91.184.90/dataset
+    ProxyPassReverse /dataset http://141.91.184.90/dataset
+    ProxyPass /dataset/new_resource http://10.61.47.219/dataset/new_resource
+    ProxyPassReverse /dataset/new_resource http://141.91.184.90/dataset/new_resource
+    ProxyPassReverse /dataset/new_resource http://141.91.184.90/dataset/new_resource
+    #ProxyPass /api/i18n/de http://141.91.184.90/api/i18n/de
+    ProxyPassReverse ^/uploads/group/(.*)$ http://10.61.47.219/uploads/group/$1
+    ProxyPassMatch ^/uploads/group/(.*)$ http://10.61.47.219/uploads/group/$1
+    ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+/download/[^/]+)$ http://141.91.184.90/$1
+    ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/download/[^/]+)$ http://141.91.184.90/$1
+    ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+)$ http://10.61.47.219/$1
+    ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/)$ http://10.61.47.219/$1
+    ProxyPassMatch ^/(dataset/[^/]+/resource_data/[^/]+)$ http://10.61.47.219/$1
+    ProxyPassReverse ^/(dataset/[^/]+/resource_data/[^/]+)$ http://10.61.47.219/$1
+    ProxyPassMatch ^/(dataset/[^/]+/resource_edit/[^/]+)$ http://10.61.47.219/$1
+    ProxyPassReverse ^/(dataset/[^/]+/resource_edit/[^/]+)$ http://10.61.47.219/$1
+    ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/new_view[^/]*)$ http://10.61.47.219/$1
+    ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+/new_view[^/]*)$ http://10.61.47.219/$1
+    ProxyPassMatch ^/(harvest.*)$ http://141.91.184.90/$1
+    ProxyPassReverse /harvest http://141.91.184.90/harvest
+    ProxyPass /harvest http://141.91.184.90/harvest
+    ProxyPassReverse ^/(harvest.*)$ http://141.91.184.90/$1
+    ProxyPassReverse ^/(api/3/action/package.*)$ http://10.61.47.219/$1
+    ProxyPassMatch ^/(api/3/action/package.*)$ http://10.61.47.219/$1
+    ProxyPass /api/action/package_create http://10.61.47.219/api/action/package_create
+    ProxyPassReverse /api/action/package_create http://10.61.47.219/api/action/package_create
+    ProxyPass /api/action/resource_create http://10.61.47.219/api/action/resource_create
+    ProxyPassReverse /api/action/resource_create http://10.61.47.219/api/action/resource_create
+    ProxyPassMatch ^/(organization/edit/[^/]+)$ http://10.61.47.219/$1
+    ProxyPassReverse ^/(organization/edit/[^/]+)$ http://10.61.47.219/$1 
+    ProxyPass /organization/new http://<interne-IP-Master>/organization/new
+    ProxyPassReverse /organization/new http://<interne-IP-Master>/organization/new
+    ProxyPassReverse /organization http://<interne-IP-Master>/organization
+    ProxyPassReverse ^/(organization/edit/[^/]+)$ http://<interne-IP-Master>/$1
+
+    # ProxyPass /datarequest http://10.61.47.219/datarequest
+    # ProxyPassReverse /datarequest http://10.61.47.219/datarequest
+    """
+
+        expected = self._parse_rules(expexted_rules.splitlines())
+
+        # with open('ckan_default.conf', 'r') as aconfig:
+        with open('/etc/apache2/sites-enabled/ckan_default.conf', 'r') as aconfig:
+            lines = aconfig.readlines()
+            # pdb.set_trace()
+            current = self._parse_rules(lines, check_host=True)
+            if len(expected.symmetric_difference(current)) > 0:
+                diff = expected.difference(current)
+                if len(diff) > 0:
+                    print('WARNING: missing routes:')
+                    for r in sorted(diff, key=lambda tup: tup[1]):
+                        print('{cmd} {source} {target}'.format(
+                            cmd=r[0], source=r[1], target='http://<interne-IP-Master>'+r[2]))
+                diff = current.difference(expected)
+                if len(diff) > 0:
+                    print('WARNING: found unexpected routes:')
+                    for r in sorted(diff, key=lambda tup: tup[1]):
+                        print('{cmd} {source} {target}'.format(
+                            cmd=r[0], source=r[1], target='<target>'+r[2]))
+
+    def _parse_rules(self, lines, check_host=False):
+        rules = set(['ProxyPassMatch', 'ProxyPassReverse', 'ProxyPass'])
+        ret = []
+        hosts = set()
+        for line in lines:
+            tokens = filter(lambda t: t.strip(), line.strip().split(' '))
+            if not tokens or tokens[0] not in rules:
+                continue
+            assert len(tokens) == 3
+            # for token in tokens:
+            # print(token)
+            f = urlsplit(tokens[2])
+            ret.append((tokens[0], tokens[1], f.path))
+            hosts.add(f.netloc)
+        if check_host and len(hosts) > 1:
+            print('WARNING: found multiple target hosts: {hosts}'.format(
+                hosts=', '.join(hosts)))
+        return set(ret)
diff --git a/ckanext/odsh/tests/test_harvest.py b/ckanext/odsh/tests/test_harvest.py
index 5b6311a896c645aa0d6b58c64a51a8d66373965f..ff7df9f46fbbbaf5b2e6d871183f3a47ee1444e0 100644
--- a/ckanext/odsh/tests/test_harvest.py
+++ b/ckanext/odsh/tests/test_harvest.py
@@ -1,4 +1,5 @@
 from ckanext.odsh.tests.test_helpers import AppProxy
+import ckanext.odsh.tests.test_helpers as testhelpers
 import ckan.tests.factories as factories
 import uuid
 import pdb
@@ -9,18 +10,19 @@ import subprocess
 
 class TestHarvest:
 
-    def _create_harvester(self):
+    def _create_harvester(self, source_type):
         guid = str(uuid.uuid4())
-        self.org = factories.Organization(
-            name="test_harvest_org_" + guid,
-            users=[{'name': 'ckanuser', 'capacity': 'admin'}]
-        )
+        # self.org = factories.Organization(
+        #     name="test_harvest_org_" + guid,
+        #     users=[{'name': 'ckanuser', 'capacity': 'admin'}]
+        # )
         self._get_app().login()
         response = self.app.get('/harvest/new')
         form = response.forms[0]
         title = 'harvest_test_source_' + guid
         form['title'] = title
         form['url'] = "http://localhost:5002/" + guid 
+        form['source_type'] = source_type 
         final_response = self.app.submit_form(form)
         # submit_response = self.app.submit_form(form)
         # assert 'missing value' in submit_response
@@ -32,7 +34,7 @@ class TestHarvest:
 
     def test_harvest_dcat(self):
         # Arrange
-        harvester = self._create_harvester()
+        harvester = self._create_harvester('dcat_rdf')
         harvest_sever_mock.data = self._load_rdf_catalog()
         server = HarvestServerMock()
         server.start()
@@ -41,7 +43,7 @@ class TestHarvest:
 
     def run_harvest(self, harvester):
         out = subprocess.check_output([
-            "paster", "--plugin=ckanext-harvest", "harvester", "run_test", harvester,   "--config=/etc/ckan/default/development.ini"])
+            "paster", "--plugin=ckanext-harvest", "harvester", "run_test", harvester,   '--config='+testhelpers.getConfigPath()])
 
     def _get_app(self):
         if not hasattr(self, 'app'):
@@ -50,7 +52,6 @@ class TestHarvest:
         return self.app
 
     def _load_rdf_catalog(self):
-        # with open('ckanext/odsh/tests/rdf_catalog.xml', 'r') as rdffile:
-        with open('ckanext/odsh/tests/rdf_catalog_empty.xml', 'r') as rdffile:
+        with open('ckanext/odsh/tests/rdf_catalog.xml', 'r') as rdffile:
             data = rdffile.read()
             return data
diff --git a/ckanext/odsh/tests/test_helpers.py b/ckanext/odsh/tests/test_helpers.py
index cbfa4f8236440e49caa80b2d1e640c022fbd4da5..678af9b055dc1e7afd1479375226c3c22c98e955 100644
--- a/ckanext/odsh/tests/test_helpers.py
+++ b/ckanext/odsh/tests/test_helpers.py
@@ -4,6 +4,7 @@ import functools
 from ckan.common import config
 import ckan.config.middleware
 import ckan.tests.helpers as helpers
+import sys
 
 
 def odsh_test():
@@ -38,13 +39,27 @@ def _get_test_app():
     return app
 
 
+def getConfigPath():
+    path = None
+    for a in sys.argv:
+        if a.startswith('--with-pylons'):
+            path = a.split('=')[1]
+            break
+    assert path, 'could not find config parameter'
+    return path
+
 class AppProxy:
     def login(self):
         app = _get_test_app()
         response = app.get('/user/login')
         login_form = response.forms[0]
-        login_form['login'] = 'ckanuser'
-        login_form['password'] = 'pass'
+
+        user = config.get('ckanext.odsh.testuser', None)
+        assert user 
+        password = config.get('ckanext.odsh.testuserpass', None)
+        assert password 
+        login_form['login'] = user
+        login_form['password'] = password
         submit_response = login_form.submit('save')
         final_response = helpers.webtest_maybe_follow(submit_response)
         self.app = app
diff --git a/ckanext/odsh/tests/test_rdfexport.py b/ckanext/odsh/tests/test_rdfexport.py
index 89bc093b981291aff2a2ddffcddcc4641f53927a..7b2c58490c2d27f5a499689815d4f8a2d2099b36 100644
--- a/ckanext/odsh/tests/test_rdfexport.py
+++ b/ckanext/odsh/tests/test_rdfexport.py
@@ -9,6 +9,8 @@ import urllib2
 import ckan.tests.helpers as helpers
 from ckan.common import config
 import ckan.config.middleware
+from ckanext.dcatde.profiles import DCATDE, DCAT, DCATDE_1_0
+import pdb
 
 
 # run with nosetests --ckan --nologcapture --with-pylons=<config to test> ckanext/odsh/tests/test_routes.py
@@ -17,7 +19,6 @@ DCAT = Namespace("http://www.w3.org/ns/dcat#")
 DCT = Namespace("http://purl.org/dc/terms/")
 
 
-
 def _get_test_app():
     app = ckan.config.middleware.make_app(config['global_conf'], **config)
     app = helpers.CKANTestApp(app)
@@ -44,9 +45,10 @@ class TestRDFExport:
                                     issued='27-01-2000',
                                     extras=extras,
                                     owner_org='test',
-                                    license_id="http://dcat-ap.de/def/licenses/dl-by-de/2.0")
+                                    license_id="http://dcat-ap.de/def/licenses/dl-by-de/2.0",
+                                    licenseAttributionByText='foo')
         factories.Resource(
-            package_id=dataset['id'], license=dataset['license_id'])
+            package_id=dataset['id'], license=dataset['license_id'], licenseAttributionByText='foo')
         factories.Resource(
             package_id=dataset['id'])
 
@@ -54,9 +56,25 @@ class TestRDFExport:
         response = self._get_app().get('/dataset/'+dataset['name']+'.rdf')
         g.parse(data=response.body)
         lic = self._extract_licenses(g)
+        att = self._extract_licenseAttributions(g)
 
         assert len(lic) == 3
+        assert len(att) == 3
         assert len(set([str(l) for l in lic])) == 1
+        assert len(set([str(a) for a in att])) == 1
+        assert str(att[0]) == 'foo'
+
+    def test_catalog(self):
+        g = rdflib.Graph()
+        response = self._get_app().get('/catalog.xml')
+        g.parse(data=response.body)
+        datasets = list(g.subjects(RDF.type, DCAT.Dataset))
+
+        response = self._get_app().get('/api/3/action/package_search')
+        plist = json.loads(response.body)
+
+        assert len(datasets) == plist['result']['count'], "{rdf} != {cat}".format(
+            rdf=len(datasets), cat=plist['result']['count'])
 
     def _get_app(self):
         if not hasattr(self, 'app'):
@@ -78,3 +96,20 @@ class TestRDFExport:
                 ret.append(l)
 
         return ret
+
+    def _extract_licenseAttributions(self, g):
+
+        datasets = list(g.subjects(RDF.type, DCAT.Dataset))
+        assert len(datasets) == 1
+        dataset = datasets[0]
+
+        ret = []
+        for namespace in [DCATDE, DCATDE_1_0]:
+            ret += list(g.objects(dataset, namespace.licenseAttributionByText))
+
+        distributions = list(g.objects(dataset, DCAT.distribution))
+        for d in distributions:
+            for namespace in [DCATDE, DCATDE_1_0]:
+                ret += list(g.objects(d, namespace.licenseAttributionByText))
+
+        return ret