diff --git a/CHANGELOG.md b/CHANGELOG.md
index 82ebf0d5cf226bd796b2e54d651133134f285fa4..9aa795d453dbda0519c7fb4d2bead44ad68f6ac3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -20,7 +20,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ### Removed
 
-- Support for Python2 removed.
+- Removed support for Python 2.
+- Removed ICAP support.
+- Removed master and slave operation support.
 
 ## [1.4.3] - 2022-11-03
 
diff --git a/ckanext/odsh/helpers.py b/ckanext/odsh/helpers.py
index 0dac0cfba4bc6eab333cd2d2eacb1a72568c6ad3..785e9c2d0b46ec8ed7c5bbd3e76eb3b6fd6d1fa7 100644
--- a/ckanext/odsh/helpers.py
+++ b/ckanext/odsh/helpers.py
@@ -250,6 +250,7 @@ def odsh_public_resource_url(res):
     else:
         return res['url']
 
+
 def odsh_get_version_id():
     try:
         home = config.get('ckanext.odsh.home', None)
@@ -262,15 +263,10 @@ def odsh_get_version_id():
         return 'unknown'
     return 'unknown'
 
+
 def odsh_show_testbanner():
     return config.get('ckanext.odsh.showtestbanner', 'False') == 'True'
 
-def odsh_is_slave():
-    c = config.get('ckanext.odsh.slave', None)
-    if c is None or (c != 'True' and c != 'False'):
-        return -1 
-    return 1 if c == 'True' else 0
-
 
 def odsh_get_facet_items_dict(name, limit=None):
     '''
diff --git a/ckanext/odsh/lib/odsh_icap_client.py b/ckanext/odsh/lib/odsh_icap_client.py
deleted file mode 100644
index fac8b274deefe32dc907048a08231e1514590cb3..0000000000000000000000000000000000000000
--- a/ckanext/odsh/lib/odsh_icap_client.py
+++ /dev/null
@@ -1,205 +0,0 @@
-import socket
-import sys
-import time
-import logging
-from ckan.common import config
-import ckan.plugins.toolkit as toolkit
-
-log = logging.getLogger(__name__)
-
-def _read_from_config(key):
-    value = config.get(key, None)
-    if value is None:
-        _raise_KeyError_if_not_in_config(key)
-    return value
-
-def _raise_KeyError_if_not_in_config(key):
-    raise KeyError('key {} is not defined in ckan config file.'.format(key))
-
-
-class ODSHICAPRequest(object):
-
-    def __init__(self, FILENAME, FILEBUFF):
-        try:
-            self.HOST = _read_from_config('ckanext.odsh.icap.host')
-            self.PORT = toolkit.asint(_read_from_config('ckanext.odsh.icap.port'))
-            self.CLIENTIP = _read_from_config('ckanext.odsh.icap.clientip')
-        except KeyError as e:
-            log.error(e)
-        self.FILENAME = FILENAME
-        self.FILEBUFF = FILEBUFF
-    
-    def send(self):
-        print("----- Starting ICAP-Request via RESPMOD -----")
-
-        # socket connect
-        try:
-            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        except socket.error as msg:
-            sys.stderr.write("[ERROR] %s\n" % msg[1])
-            sys.exit(1)
-
-        try:
-            sock.connect((self.HOST, self.PORT))
-        except socket.error as msg:
-            sys.stderr.write("[ERROR] %s\n" % msg[1])
-            sys.exit(2)
-
-        # create and send header
-        header = self._get_icap_header(self.FILENAME, self.HOST, self.PORT, self.CLIENTIP).encode()
-        sock.send(header)
-
-        # send file and terminating signal
-        self._sendfile(self.FILEBUFF, sock)
-        sock.send('0\r\n\r\n')
-
-        # fetch and parse the response
-        data_response = self._recvall(sock)
-        response_object = self._parse_response(data_response)
-
-        print("----- Finished ICAP-Request via RESPMOD -----")
-
-        return response_object
-
-    def _get_icap_header(self, fileName, host, port, clientIP):
-        uniqueInt = time.time() # used to generate "unique" int for disabling cache
-    
-        icapRequest = 'RESPMOD' + ' ' + 'icap://' + host + ':' + str(port) + '/RESPMOD' + \
-                      ' ICAP/1.0\r\n' + 'Host: ' + host + ':' + str(port) + '\r\n'
-        icapRequest += 'Allow: 204\r\n'
-        icapRequest += 'X-Client-IP: ' + clientIP + '\r\n'
-    
-        httpRequest = "GET http://" + clientIP + "/" + str(uniqueInt).replace('.', '_') + "/" + \
-                      fileName + ' HTTP/1.1\r\nHost: ' + clientIP + '\r\n\r\n'
-
-        httpResponse = 'HTTP/1.1 200 OK\r\n'
-        httpResponse += 'Transfer-Encoding: chunked\r\n'
-        httpResponse += '\r\n'
-
-        httpRequestLength = len(httpRequest)
-        httpResponseLength = len(httpResponse)
-
-        icapRequest += 'Encapsulated: req-hdr=0, res-hdr=' + str(httpRequestLength) + ', res-body=' + \
-                       str(httpRequestLength + httpResponseLength) + '\r\n\r\n' + httpRequest + httpResponse;
-
-        return icapRequest
-
-    def _sendfile(self, fileBuffer, sock):
-        print('start sending file')
-        PACK_SIZE = 1024 # in bytes
-
-        l = fileBuffer.read(PACK_SIZE)
-        while(l):
-            print(('sending %d bytes of data...' % len(l)))
-            sock.send('{:02X}'.format(len(l)).encode())
-            sock.send("\r\n".encode())
-            sock.send(l)
-            sock.send("\r\n".encode())
-            l = fileBuffer.read(PACK_SIZE)
-    
-    def _recvall(self, sock):
-        print('receiving response from icap server')
-        BUFF_SIZE = 4096 # 4 KiB
-        data = b''
-        while True:
-            part = sock.recv(BUFF_SIZE)
-            data += part
-            if len(part) < BUFF_SIZE:
-                # either 0 or end of data
-                break
-        return data
-
-    def _parse_response(self, data_response):
-        print('parsing response')
-        lines = data_response.split('\r\n')
-        http_status_code = self._parse_response_http_statuscode(lines)
-        http_block = self._parse_block(lines, 'HTTP/1.1')
-        icap_block = self._parse_block(lines, 'ICAP/1.0')
-
-        response_object = ODSHParsedICAPResponse(data_response, http_status_code, http_block, icap_block)
-        return response_object
-
-    def _parse_response_http_statuscode(self, data_response_lines):
-        http_status_code_found = False
-        http_status_code = None
-        for line in data_response_lines:
-            if line.startswith('HTTP/1.1'):
-                http_status_code = int(line.split(' ')[1]) # example: HTTP/1.1 403 VirusFound
-                http_status_code_found = True
-        
-        if not http_status_code_found:
-            http_status_code = 200 # if no virus is found, no http_status_code is given, defaulting to 200 OK
-        
-        return http_status_code
-
-    def _parse_block(self, data_response_lines, block_start_signal):
-        block_data = None
-        in_block = False
-
-        for line in data_response_lines:
-            if line.startswith(block_start_signal):
-                in_block = True
-                block_data = ''
-            if in_block and not len(line):
-                in_block = False
-                break
-            if in_block:
-                block_data += line + '\r\n'
-
-        return block_data
-            
-
-class ODSHParsedICAPResponse(object):
-
-    def __init__(self, full_response, http_status_code, http_block, icap_block):
-        self.full_response = full_response
-        self.http_status_code = http_status_code
-        self.http_block = http_block
-        self.icap_block = icap_block
-
-    def virus_found(self):
-        if (self.http_status_code != 200) and (self.http_status_code != 403):
-            raise UnknownResponseException('Received an unknown http response code: %d' % self.http_status_code)
-        return self.http_status_code != 200
-
-
-class UnknownResponseException(Exception):
-    pass
-    
-
-def example_print_response(response_object):
-    print('')
-    print('Example output of response_object:')
-    print('')
-
-    #print('Full ICAP-Response: ')
-    #print(response_object.full_response)
-    #print('')
-
-    print('HTTP-Status-Code (explicit or implied):')
-    print((response_object.http_status_code))
-    print('')
-    
-    print('HTTP-Block:')
-    print((response_object.http_block))
-    print('')
-
-    print('ICAP-Block:')
-    print((response_object.icap_block))
-    print('')
-
-    print('Virus found?')
-    print((response_object.virus_found()))
-    print('')
-
-        
-if __name__ == "__main__":
-
-    # example file with virus
-    FILENAME = 'test_files/eicar.txt'
-
-    # example file without virus
-    #FILENAME = 'test_files/lorem-ipsum.pdf'
-
-    odsh_parsed_icap_response = ODSHICAPRequest(FILENAME).send()
-    example_print_response(odsh_parsed_icap_response)
diff --git a/ckanext/odsh/lib/uploader.py b/ckanext/odsh/lib/uploader.py
index 144a64ae2bf8b16e98e43507f3ed896f184aba8c..3ef936af83fb3b167f0098a34bd8d8f95ebc72d9 100644
--- a/ckanext/odsh/lib/uploader.py
+++ b/ckanext/odsh/lib/uploader.py
@@ -3,31 +3,11 @@ from ckan.lib.uploader import ResourceUpload, Upload
 import ckan.plugins.toolkit as toolkit
 import ckan.plugins.toolkit as tk
 
-from .odsh_icap_client import ODSHICAPRequest
 import logging
 import hashlib
 
 log = logging.getLogger(__name__)
 
-
-def _icap_virus_found(filename, upload_file):
-    # the flag skip_icap_virus_check in can be used during development
-    skip_icap_virus_check = toolkit.asbool(
-        tk.config.get('ckanext.odsh.skip_icap_virus_check', 'False')
-    )
-    if skip_icap_virus_check:
-        log.debug("WARNING: icap virus check skipped, remove parameter ckanext.odsh.skip_icap_virus_check from ckan's ini file")
-        return False
-    if filename and upload_file:
-        response_object = ODSHICAPRequest(filename, upload_file).send()
-        return response_object.virus_found()
-
-
-def _raise_validation_error_if_virus_found(filename, upload_file):
-    if _icap_virus_found(filename, upload_file):
-        raise logic.ValidationError({'upload': ['Virus gefunden']})
-
-
 def calculate_hash(upload_file):
     upload_file.seek(0)
     hash_md5 = hashlib.md5()
@@ -53,7 +33,6 @@ class ODSHResourceUpload(ResourceUpload):
         log.debug("Resource({}) uploaded.".format(resource))
         super(ODSHResourceUpload, self).__init__(resource)
         if hasattr(self, 'filename') and hasattr(self, 'upload_file'):
-            _raise_validation_error_if_virus_found(self.filename, self.upload_file)
             _raise_validation_error_if_hash_values_differ(self.upload_file, resource)
 
 
@@ -72,6 +51,4 @@ class ODSHUpload(Upload):
         super(ODSHUpload, self).update_data_dict(data_dict, url_field, file_field, clear_field)
     
     def upload(self, max_size=2):
-        if hasattr(self, 'filename') and hasattr(self, 'upload_file'):
-            _raise_validation_error_if_virus_found(self.filename, self.upload_file)
         super(ODSHUpload, self).upload(max_size)
diff --git a/ckanext/odsh/plugin.py b/ckanext/odsh/plugin.py
index 0539798a3ffa267a334a220b717342b924e84545..45341ea9a59622cdbd8b81222ab1b223951534cf 100644
--- a/ckanext/odsh/plugin.py
+++ b/ckanext/odsh/plugin.py
@@ -325,7 +325,6 @@ class OdshPlugin(plugins.SingletonPlugin, DefaultTranslation, DefaultDatasetForm
                 'odsh_public_resource_url': odsh_helpers.odsh_public_resource_url,
                 'odsh_get_version_id': odsh_helpers.odsh_get_version_id,
                 'odsh_show_testbanner': odsh_helpers.odsh_show_testbanner,
-                'odsh_is_slave': odsh_helpers.odsh_is_slave,
                 'tpsh_get_daterange_prettified': helper_pkg_dict.get_daterange_prettified,
                 'tpsh_get_language_of_package': helpers_tpsh.get_language_of_package,
                 'get_language_icon': helpers_tpsh.get_language_icon,
diff --git a/ckanext/odsh/plugin_odsh_icap.py b/ckanext/odsh/plugin_odsh_icap.py
deleted file mode 100644
index d6462db4af0e09a46a2ac900a91dc923cb59728b..0000000000000000000000000000000000000000
--- a/ckanext/odsh/plugin_odsh_icap.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import ckan.plugins as plugins
-from ckanext.odsh.lib.uploader import ODSHResourceUpload, ODSHUpload
-
-class OdshIcapPlugin(plugins.SingletonPlugin):
-    plugins.implements(plugins.IUploader, inherit=True)
-
-    def get_resource_uploader(self, data_dict):
-        return ODSHResourceUpload(data_dict)
-    
-    def get_uploader(self, upload_to, old_filename):
-        return ODSHUpload(upload_to, old_filename)
\ No newline at end of file
diff --git a/ckanext/odsh/precondition.py b/ckanext/odsh/precondition.py
index eda5a961a5a8d2e555beebe0e28c59283d1032a0..5314133f7735723645ebc99ebc8bf2f19a0f94eb 100644
--- a/ckanext/odsh/precondition.py
+++ b/ckanext/odsh/precondition.py
@@ -3,14 +3,3 @@ import ckan.plugins.toolkit as tk
 class PreconditionViolated(Exception):
     def __init__(self, message):
         super(PreconditionViolated, self).__init__(message)
-
-
-def not_on_slave(func):
-    def wrapped(*args, **kwargs):
-        if tk.config.get('ckanext.odsh.slave', False):
-            raise PreconditionViolated('not allowed on slave')
-        return func(*args, **kwargs)
-
-    if tk.config.get('ckanext.odsh.debug', False):
-        return wrapped
-    return func
diff --git a/ckanext/odsh/templates/base.html b/ckanext/odsh/templates/base.html
index 5e2a2ac29dff6458256ebdfad63f216a772b6952..fdc7cbe465311eb0508c71e8c0f79fb684fec1ee 100644
--- a/ckanext/odsh/templates/base.html
+++ b/ckanext/odsh/templates/base.html
@@ -9,10 +9,10 @@
 {% endblock styles %}
 
 {% block head_extras %}
-{{ super() }}
-<meta data-name="type" content="{{h.odsh_is_slave()}}">
-
+  {{ super() }}
 {% endblock %}
+
 {% block bodytag %} data-site-root="{{ h.odsh_public_url() }}" data-locale-root="{{ h.odsh_public_url() }}" {% endblock %}
+
 {% block page %}
 {% endblock %}
diff --git a/ckanext/odsh/tests/ckan_selenium.py b/ckanext/odsh/tests/ckan_selenium.py
index 3bacd10f910c67790f5b4ea5a5e2ca98f3dda215..239f35b1c76cce6293a059ef6fa02ba356b24416 100644
--- a/ckanext/odsh/tests/ckan_selenium.py
+++ b/ckanext/odsh/tests/ckan_selenium.py
@@ -52,9 +52,6 @@ class SeleniumCkanApp:
     def close():
         driver.close()
 
-    def get_slave_flag(self):
-        return self.findElementByXPath("//meta[@data-name='type']").get_attribute("content")
-
     def findElementByXPath(self, xpath):
         return self.driver.find_element(By.XPATH, xpath)
 
@@ -88,7 +85,3 @@ class SeleniumCkanApp:
 
     def currentUrl(self):
         return self.driver.current_url
-
-    def onMaster(self):
-        cont = self.get_slave_flag()
-        return cont == '0'
diff --git a/ckanext/odsh/tests/test_env.py b/ckanext/odsh/tests/test_env.py
index 978fdfde07442c0316988a09c9da9a487b840590..70e2bf126e9ac42b275aff07b190fbd271ca2e76 100644
--- a/ckanext/odsh/tests/test_env.py
+++ b/ckanext/odsh/tests/test_env.py
@@ -12,8 +12,6 @@ from urllib.parse import urlsplit
 
 expected_commit = '8cd9576884cae6abe50a27c891434cb9fe87ced2'
 
-# run with nosetests --ckan --nologcapture --with-pylons=<config to test> ckanext/odsh/tests/test_env.py
-
 
 def checkConfig(key, expected=None, minLength=None):
     value = config.get(key, None)
@@ -64,16 +62,6 @@ def checkJsonFile(key, expectedKey=None, expectedLength=None):
             assert len(data) >= expectedLength
 
 
-def isSlave():
-    value = checkConfig('ckanext.odsh.slave')
-    assert value == 'True' or value == 'False'
-    return checkConfig('ckanext.odsh.slave') == 'True'
-
-
-def isMaster():
-    return not isSlave()
-
-
 class MultiOrderedDict(OrderedDict):
     def __setitem__(self, key, value):
         if isinstance(value, list) and key in self:
@@ -113,24 +101,19 @@ class TestEnv:
         checkConfig('ckan.site_intro_text',
                     '#Willkommen auf Open Data Portal Schleswig-Holstein.')
 
-        if isMaster():
-            checkConfigDir('ckan.storage_path')
-            checkConfig('ckanext-archiver.user_agent_string',
-                        'Open Data Schleswig-Holstein')
-            checkConfig('ckan.harvest.mq.type', 'redis')
-
-        if isSlave():
-            checkConfig('ckanext.odsh.upload_formats', minLength=2)
-            checkConfig('ckanext.spatial.search_backend', 'solr-spatial-field')
-            checkConfig('ckanext.spatial.common_map.type', 'wms')
-            checkConfig('ckanext.spatial.common_map.wms.url',
-                        'https://sg.geodatenzentrum.de/wms_webatlasde.light_grau')
-            checkConfig('ckanext.spatial.common_map.wms.layers',
-                        'webatlasde.light_grau')
-
-        if isMaster():
-            checkJsonFile(
-                'qa.resource_format_openness_scores_json', expectedLength=60)
+        checkConfigDir('ckan.storage_path')
+        checkConfig('ckanext-archiver.user_agent_string',
+                    'Open Data Schleswig-Holstein')
+        checkConfig('ckan.harvest.mq.type', 'redis')
+        checkConfig('ckanext.odsh.upload_formats', minLength=2)
+        checkConfig('ckanext.spatial.search_backend', 'solr-spatial-field')
+        checkConfig('ckanext.spatial.common_map.type', 'wms')
+        checkConfig('ckanext.spatial.common_map.wms.url',
+                    'https://sg.geodatenzentrum.de/wms_webatlasde.light_grau')
+        checkConfig('ckanext.spatial.common_map.wms.layers',
+                    'webatlasde.light_grau')
+        checkJsonFile(
+            'qa.resource_format_openness_scores_json', expectedLength=60)
 
         checkConfig('ckanext.odsh.language.mapping',
                     '/usr/lib/ckan/default/src/ckanext-odsh/languages.json')
@@ -140,16 +123,9 @@ class TestEnv:
 
     def test_plugins(self):
         value = config.get('ckan.plugins', [])
-        for p in ['odsh']:
+        for p in ['odsh', 'odsh_dcat_harvest', 'odsh_autocomplete']:
             assert p in value, 'missing plugin:' + p
 
-        if isMaster():
-            for p in ['odsh_icap', 'odsh_dcat_harvest']:
-                assert p in value, 'missing plugin:' + p
-        if isSlave():
-            for p in ['odsh_autocomplete datarequests']:
-                assert p in value, 'missing plugin:' + p
-
         # pdb.set_trace()
 
     def test_licenses(self):
@@ -167,92 +143,92 @@ class TestEnv:
         profiles.resource_formats()
         assert len(profiles._RESOURCE_FORMATS_IMPORT) > 120
 
-    def test_routes(self):
-        if isMaster():
-            return
-
-        expexted_rules = \
-            """ ProxyPass /dataset/new http://10.61.47.219/dataset/new
-    ProxyPassReverse /dataset/new http://10.61.47.219/dataset/new
-    ProxyPassMatch ^/(dataset/delete/[^/]+)$ http://10.61.47.219/$1
-    ProxyPassReverse ^/(dataset/delete/[^/]+)$ http://10.61.47.219/$1
-    ProxyPassMatch ^/(dataset/edit/[^/]+)$ http://10.61.47.219/$1
-    ProxyPassReverse ^/(dataset/edit/[^/]+)$ http://10.61.47.219/$1
-    ProxyPassReverse /dataset http://141.91.184.90/dataset
-    ProxyPassReverse /dataset http://141.91.184.90/dataset
-    ProxyPass /dataset/new_resource http://10.61.47.219/dataset/new_resource
-    ProxyPassReverse /dataset/new_resource http://141.91.184.90/dataset/new_resource
-    ProxyPassReverse /dataset/new_resource http://141.91.184.90/dataset/new_resource
-    #ProxyPass /api/i18n/de http://141.91.184.90/api/i18n/de
-    ProxyPassReverse ^/uploads/group/(.*)$ http://10.61.47.219/uploads/group/$1
-    ProxyPassMatch ^/uploads/group/(.*)$ http://10.61.47.219/uploads/group/$1
-    ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+/download/[^/]+)$ http://141.91.184.90/$1
-    ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/download/[^/]+)$ http://141.91.184.90/$1
-    ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+)$ http://10.61.47.219/$1
-    ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/)$ http://10.61.47.219/$1
-    ProxyPassMatch ^/(dataset/[^/]+/resource_data/[^/]+)$ http://10.61.47.219/$1
-    ProxyPassReverse ^/(dataset/[^/]+/resource_data/[^/]+)$ http://10.61.47.219/$1
-    ProxyPassMatch ^/(dataset/[^/]+/resource_edit/[^/]+)$ http://10.61.47.219/$1
-    ProxyPassReverse ^/(dataset/[^/]+/resource_edit/[^/]+)$ http://10.61.47.219/$1
-    ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/new_view[^/]*)$ http://10.61.47.219/$1
-    ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+/new_view[^/]*)$ http://10.61.47.219/$1
-    ProxyPassMatch ^/(harvest.*)$ http://141.91.184.90/$1
-    ProxyPassReverse /harvest http://141.91.184.90/harvest
-    ProxyPass /harvest http://141.91.184.90/harvest
-    ProxyPassReverse ^/(harvest.*)$ http://141.91.184.90/$1
-    ProxyPassReverse ^/(api/3/action/package.*)$ http://10.61.47.219/$1
-    ProxyPassMatch ^/(api/3/action/package.*)$ http://10.61.47.219/$1
-    ProxyPass /api/action/package_create http://10.61.47.219/api/action/package_create
-    ProxyPassReverse /api/action/package_create http://10.61.47.219/api/action/package_create
-    ProxyPass /api/action/resource_create http://10.61.47.219/api/action/resource_create
-    ProxyPassReverse /api/action/resource_create http://10.61.47.219/api/action/resource_create
-    ProxyPassMatch ^/(organization/edit/[^/]+)$ http://10.61.47.219/$1
-    ProxyPassReverse ^/(organization/edit/[^/]+)$ http://10.61.47.219/$1 
-    ProxyPass /organization/new http://<interne-IP-Master>/organization/new
-    ProxyPassReverse /organization/new http://<interne-IP-Master>/organization/new
-    ProxyPassReverse /organization http://<interne-IP-Master>/organization
-    ProxyPassReverse ^/(organization/edit/[^/]+)$ http://<interne-IP-Master>/$1
-
-    # ProxyPass /datarequest http://10.61.47.219/datarequest
-    # ProxyPassReverse /datarequest http://10.61.47.219/datarequest
-    """
-
-        expected = self._parse_rules(expexted_rules.splitlines())
-
-        # with open('ckan_default.conf', 'r') as aconfig:
-        with open('/etc/apache2/sites-enabled/ckan_default.conf', 'r') as aconfig:
-            lines = aconfig.readlines()
-            # pdb.set_trace()
-            current = self._parse_rules(lines, check_host=True)
-            if len(expected.symmetric_difference(current)) > 0:
-                diff = expected.difference(current)
-                if len(diff) > 0:
-                    print('WARNING: missing routes:')
-                    for r in sorted(diff, key=lambda tup: tup[1]):
-                        print(('{cmd} {source} {target}'.format(
-                            cmd=r[0], source=r[1], target='http://<interne-IP-Master>'+r[2])))
-                diff = current.difference(expected)
-                if len(diff) > 0:
-                    print('WARNING: found unexpected routes:')
-                    for r in sorted(diff, key=lambda tup: tup[1]):
-                        print(('{cmd} {source} {target}'.format(
-                            cmd=r[0], source=r[1], target='<target>'+r[2])))
-
-    def _parse_rules(self, lines, check_host=False):
-        rules = set(['ProxyPassMatch', 'ProxyPassReverse', 'ProxyPass'])
-        ret = []
-        hosts = set()
-        for line in lines:
-            tokens = [t for t in line.strip().split(' ') if t.strip()]
-            if not tokens or tokens[0] not in rules:
-                continue
-            assert len(tokens) == 3
-            # for token in tokens:
-            # print(token)
-            f = urlsplit(tokens[2])
-            ret.append((tokens[0], tokens[1], f.path))
-            hosts.add(f.netloc)
-        if check_host and len(hosts) > 1:
-            print(('WARNING: found multiple target hosts: {hosts}'.format(
-                hosts=', '.join(hosts))))
-        return set(ret)
+    # def test_routes(self):
+    #     if isMaster():
+    #         return
+
+    #     expected_rules = \
+    #         """ ProxyPass /dataset/new http://10.61.47.219/dataset/new
+    # ProxyPassReverse /dataset/new http://10.61.47.219/dataset/new
+    # ProxyPassMatch ^/(dataset/delete/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassReverse ^/(dataset/delete/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassMatch ^/(dataset/edit/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassReverse ^/(dataset/edit/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassReverse /dataset http://141.91.184.90/dataset
+    # ProxyPassReverse /dataset http://141.91.184.90/dataset
+    # ProxyPass /dataset/new_resource http://10.61.47.219/dataset/new_resource
+    # ProxyPassReverse /dataset/new_resource http://141.91.184.90/dataset/new_resource
+    # ProxyPassReverse /dataset/new_resource http://141.91.184.90/dataset/new_resource
+    # #ProxyPass /api/i18n/de http://141.91.184.90/api/i18n/de
+    # ProxyPassReverse ^/uploads/group/(.*)$ http://10.61.47.219/uploads/group/$1
+    # ProxyPassMatch ^/uploads/group/(.*)$ http://10.61.47.219/uploads/group/$1
+    # ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+/download/[^/]+)$ http://141.91.184.90/$1
+    # ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/download/[^/]+)$ http://141.91.184.90/$1
+    # ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/)$ http://10.61.47.219/$1
+    # ProxyPassMatch ^/(dataset/[^/]+/resource_data/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassReverse ^/(dataset/[^/]+/resource_data/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassMatch ^/(dataset/[^/]+/resource_edit/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassReverse ^/(dataset/[^/]+/resource_edit/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassMatch ^/(dataset/[^/]+/resource/[^/]+/new_view[^/]*)$ http://10.61.47.219/$1
+    # ProxyPassReverse ^/(dataset/[^/]+/resource/[^/]+/new_view[^/]*)$ http://10.61.47.219/$1
+    # ProxyPassMatch ^/(harvest.*)$ http://141.91.184.90/$1
+    # ProxyPassReverse /harvest http://141.91.184.90/harvest
+    # ProxyPass /harvest http://141.91.184.90/harvest
+    # ProxyPassReverse ^/(harvest.*)$ http://141.91.184.90/$1
+    # ProxyPassReverse ^/(api/3/action/package.*)$ http://10.61.47.219/$1
+    # ProxyPassMatch ^/(api/3/action/package.*)$ http://10.61.47.219/$1
+    # ProxyPass /api/action/package_create http://10.61.47.219/api/action/package_create
+    # ProxyPassReverse /api/action/package_create http://10.61.47.219/api/action/package_create
+    # ProxyPass /api/action/resource_create http://10.61.47.219/api/action/resource_create
+    # ProxyPassReverse /api/action/resource_create http://10.61.47.219/api/action/resource_create
+    # ProxyPassMatch ^/(organization/edit/[^/]+)$ http://10.61.47.219/$1
+    # ProxyPassReverse ^/(organization/edit/[^/]+)$ http://10.61.47.219/$1 
+    # ProxyPass /organization/new http://<interne-IP-Master>/organization/new
+    # ProxyPassReverse /organization/new http://<interne-IP-Master>/organization/new
+    # ProxyPassReverse /organization http://<interne-IP-Master>/organization
+    # ProxyPassReverse ^/(organization/edit/[^/]+)$ http://<interne-IP-Master>/$1
+
+    # # ProxyPass /datarequest http://10.61.47.219/datarequest
+    # # ProxyPassReverse /datarequest http://10.61.47.219/datarequest
+    # """
+
+    #     expected = self._parse_rules(expected_rules.splitlines())
+
+    #     # with open('ckan_default.conf', 'r') as aconfig:
+    #     with open('/etc/apache2/sites-enabled/ckan_default.conf', 'r') as aconfig:
+    #         lines = aconfig.readlines()
+    #         # pdb.set_trace()
+    #         current = self._parse_rules(lines, check_host=True)
+    #         if len(expected.symmetric_difference(current)) > 0:
+    #             diff = expected.difference(current)
+    #             if len(diff) > 0:
+    #                 print('WARNING: missing routes:')
+    #                 for r in sorted(diff, key=lambda tup: tup[1]):
+    #                     print(('{cmd} {source} {target}'.format(
+    #                         cmd=r[0], source=r[1], target='http://<interne-IP-Master>'+r[2])))
+    #             diff = current.difference(expected)
+    #             if len(diff) > 0:
+    #                 print('WARNING: found unexpected routes:')
+    #                 for r in sorted(diff, key=lambda tup: tup[1]):
+    #                     print(('{cmd} {source} {target}'.format(
+    #                         cmd=r[0], source=r[1], target='<target>'+r[2])))
+
+    # def _parse_rules(self, lines, check_host=False):
+    #     rules = set(['ProxyPassMatch', 'ProxyPassReverse', 'ProxyPass'])
+    #     ret = []
+    #     hosts = set()
+    #     for line in lines:
+    #         tokens = [t for t in line.strip().split(' ') if t.strip()]
+    #         if not tokens or tokens[0] not in rules:
+    #             continue
+    #         assert len(tokens) == 3
+    #         # for token in tokens:
+    #         # print(token)
+    #         f = urlsplit(tokens[2])
+    #         ret.append((tokens[0], tokens[1], f.path))
+    #         hosts.add(f.netloc)
+    #     if check_host and len(hosts) > 1:
+    #         print(('WARNING: found multiple target hosts: {hosts}'.format(
+    #             hosts=', '.join(hosts))))
+    #     return set(ret)
diff --git a/ckanext/odsh/tests/test_rdfexport.py b/ckanext/odsh/tests/test_rdfexport.py
index 795c767ca8c0fe5a1862c8054aed568403e91934..f288733179dd9201040bd85aea3105b11d519d61 100644
--- a/ckanext/odsh/tests/test_rdfexport.py
+++ b/ckanext/odsh/tests/test_rdfexport.py
@@ -13,8 +13,6 @@ from ckanext.dcatde.profiles import DCATDE, DCAT, DCATDE_1_0
 import pdb
 
 
-# run with nosetests --ckan --nologcapture --with-pylons=<config to test> ckanext/odsh/tests/test_routes.py
-
 DCAT = Namespace("http://www.w3.org/ns/dcat#")
 DCT = Namespace("http://purl.org/dc/terms/")
 
diff --git a/ckanext/odsh/tests/test_routes.py b/ckanext/odsh/tests/test_routes.py
index 59ddeffc288e9a8234008abce426339972ccb1d2..8e35beda87af8083f449e55db93fa67a07df3c01 100644
--- a/ckanext/odsh/tests/test_routes.py
+++ b/ckanext/odsh/tests/test_routes.py
@@ -10,7 +10,6 @@ import ckan.config.middleware
 from routes import url_for
 webtest_submit = helpers.webtest_submit
 
-# run with nosetests --ckan --nologcapture --with-pylons=<config to test> ckanext/odsh/tests/test_routes.py
 
 def _get_test_app():
     app = ckan.config.middleware.make_app(config['global_conf'], **config)
diff --git a/ckanext/odsh/tests/test_selenium.py b/ckanext/odsh/tests/test_selenium.py
index accaea79351b0a11d5ad4fed7c6b9ea69702d528..084e515ba5660ab9665cda186670447007f9d04c 100644
--- a/ckanext/odsh/tests/test_selenium.py
+++ b/ckanext/odsh/tests/test_selenium.py
@@ -24,7 +24,6 @@ class TestSelenium:
     @depends(after=test_login)
     def test_create_dataset(self):
         TestSelenium.app.got_to_url('/dataset/new?group=')
-        # assert TestSelenium.app.onMaster()
 
         guid = str(uuid.uuid4())
         title = 'test_' + guid
@@ -66,5 +65,4 @@ class TestSelenium:
                  ]
         for path in paths:
             TestSelenium.app.got_to_url(path)
-            cont = TestSelenium.app.get_slave_flag()
-            assert cont == '0'
+            assert TestSelenium.app.is_page_loaded()
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 99bcc0ffb174ae961710e9a943b76ed35badccb7..3fc6e68935433827740a7d8127e8f4ab439e809c 100755
--- a/setup.py
+++ b/setup.py
@@ -81,7 +81,6 @@ setup(
     entry_points='''
         [ckan.plugins]
         odsh=ckanext.odsh.plugin:OdshPlugin
-        odsh_icap=ckanext.odsh.plugin_odsh_icap:OdshIcapPlugin
         statistikamtnord_harvester=ckanext.odsh.harvesters:StatistikamtNordHarvester
         kiel_harvester=ckanext.odsh.harvesters:KielHarvester
         odsh_autocomplete=ckanext.odsh.plugin_odsh_autocomplete:OdshAutocompletePlugin
diff --git a/validation.py b/validation.py
deleted file mode 100644
index b7b04835c474e80fe7627c92a6e77a6c506d845c..0000000000000000000000000000000000000000
--- a/validation.py
+++ /dev/null
@@ -1,239 +0,0 @@
-# This Python file uses the following encoding: utf-8
-import logging
-import csv
-import re
-import urllib.request, urllib.error, urllib.parse
-import json
-from itertools import count
-from dateutil.parser import parse
-
-import ckan.plugins.toolkit as toolkit
-import ckan.model as model
-from ckan.lib.navl.dictization_functions import Missing
-
-import pdb
-
-_ = toolkit._
-
-log = logging.getLogger(__name__)
-
-
-def _extract_value(data, field):
-    key = None
-    for k in list(data.keys()):
-        if data[k] == field:
-            key = k
-            break
-    if key is None:
-        return None
-    return data[(key[0], key[1], 'value')]
-
-
-def validate_extra_groups(data, requireAtLeastOne, errors):
-    value = _extract_value(data, 'groups')
-    if value != None:
-        # 'value != None' means the extra key 'groups' was found,
-        # so the dataset came from manual editing via the web-frontend.
-        if not value:
-            if requireAtLeastOne:
-                errors['groups'] = 'at least one group needed'
-            data[('groups', 0, 'id')] = ''
-            return
-
-        groups = [g.strip() for g in value.split(',') if value.strip()]
-        for k in list(data.keys()):
-            if len(k) == 3 and k[0] == 'groups':
-                data[k] = ''
-                # del data[k]
-        if len(groups) == 0:
-            if requireAtLeastOne:
-                errors['groups'] = 'at least one group needed'
-            return
-
-        for num, group in zip(list(range(len(groups))), groups):
-            data[('groups', num, 'id')] = group
-    else:  # no extra-field 'groups'
-        # dataset might come from a harvest process
-        if not data.get(('groups', 0, 'id'), False) and \
-           not data.get(('groups', 0, 'name'), False):
-            errors['groups'] = 'at least one group needed'
-
-
-def validate_extras(key, data, errors, context):
-    extra_errors = {}
-    isStaNord = ('id',) in data and data[('id',)][:7] == 'StaNord'
-
-    validate_extra_groups(data, True, extra_errors)
-    validate_extra_date_new(key, 'issued', data, isStaNord, extra_errors)
-    validate_extra_date_new(key, 'temporal_start',
-                            data, isStaNord, extra_errors)
-    validate_extra_date_new(key, 'temporal_end', data, True, extra_errors)
-
-    if len(list(extra_errors.values())):
-        raise toolkit.Invalid(extra_errors)
-
-
-def _set_value(data, field, value):
-    key = None
-    for k in list(data.keys()):
-        if data[k] == field:
-            key = k
-            break
-    if key is None:
-        return None
-    data[(key[0], key[1], 'value')] = value
-
-
-def validate_extra_date_new(key, field, data, optional, errors):
-    value = _extract_value(data, field)
-
-    if not value:
-        if not optional:
-            errors[field] = 'empty'
-        return
-    else:
-        if re.match(r'\d\d\d\d-\d\d-\d\d', value):
-            try:
-                dt = parse(value)
-                _set_value(data, field, dt.isoformat())
-                return
-            except ValueError:
-                pass
-        errors[field] = 'not a valid date'
-
-
-def validate_licenseAttributionByText(key, data, errors, context):
-    register = model.Package.get_license_register()
-    isByLicense = False
-    for k in data:
-        if len(k) > 0 and k[0] == 'license_id' and data[k] and not isinstance(data[k], Missing) and \
-                'Namensnennung' in register[data[k]].title:
-            isByLicense = True
-            break
-    hasAttribution = False
-    for k in data:
-        if data[k] == 'licenseAttributionByText':
-            if isinstance(data[(k[0], k[1], 'value')], Missing) or (k[0], k[1], 'value') not in data:
-                del data[(k[0], k[1], 'value')]
-                del data[(k[0], k[1], 'key')]
-                break
-            else:
-                value = data[(k[0], k[1], 'value')]
-                hasAttribution = value != ''
-                break
-    if not hasAttribution:
-        current_indexes = [k[1] for k in list(data.keys())
-                           if len(k) > 1 and k[0] == 'extras']
-
-        new_index = max(current_indexes) + 1 if current_indexes else 0
-        data[('extras', new_index, 'key')] = 'licenseAttributionByText'
-        data[('extras', new_index, 'value')] = ''
-
-    if isByLicense and not hasAttribution:
-        raise toolkit.Invalid(
-            'licenseAttributionByText: empty not allowed')
-
-    if not isByLicense and hasAttribution:
-        raise toolkit.Invalid(
-            'licenseAttributionByText: text not allowed for this license')
-
-
-def known_spatial_uri(key, data, errors, context):
-    value = _extract_value(data, 'spatial_uri')
-
-    if not value:
-        poly = None
-
-        # some harvesters might import a polygon directly...
-        # pdb.set_trace()
-        poly = _extract_value(data, 'spatial')
-
-        has_old_uri = False
-        pkg = context.get('package', None)
-        if pkg:
-            old_uri = pkg.extras.get('spatial_uri', None)
-            has_old_uri = old_uri != None and len(old_uri) > 0
-            if not poly:
-                poly = pkg.extras.get('spatial', None)
-        if not poly or has_old_uri:
-            raise toolkit.Invalid('spatial_uri: empty not allowed')
-        else:
-            if poly:
-                new_index = next_extra_index(data)
-                data[('extras', new_index+1, 'key')] = 'spatial'
-                data[('extras', new_index+1, 'value')] = poly
-            return
-
-    mapping_file = tk.config.get('ckanext.odsh.spatial.mapping')
-    try:
-        mapping_file = urllib.request.urlopen(mapping_file)
-    except Exception:
-        raise Exception("Could not load spatial mapping file!")
-
-    not_found = True
-    spatial_text = str()
-    spatial = str()
-    cr = csv.reader(mapping_file, delimiter="\t")
-    for row in cr:
-        if row[0].encode('UTF-8') == value:
-            not_found = False
-            spatial_text = row[1]
-            loaded = json.loads(row[2])
-            spatial = json.dumps(loaded['geometry'])
-            break
-    if not_found:
-        raise toolkit.Invalid(
-            'spatial_uri: uri unknown')
-
-    new_index = next_extra_index(data)
-
-    data[('extras', new_index, 'key')] = 'spatial_text'
-    data[('extras', new_index, 'value')] = spatial_text
-    data[('extras', new_index+1, 'key')] = 'spatial'
-    data[('extras', new_index+1, 'value')] = spatial
-
-
-def next_extra_index(data):
-    current_indexes = [k[1] for k in list(data.keys())
-                       if len(k) > 1 and k[0] == 'extras']
-
-    return max(current_indexes) + 1 if current_indexes else 0
-
-
-def tag_name_validator(value, context):
-    tagname_match = re.compile('[\w \-.\:\(\)\ยด\`]*$', re.UNICODE)
-    if not tagname_match.match(value):
-        raise toolkit.Invalid(_('Tag "%s" must be alphanumeric '
-                                'characters or symbols: -_.:()') % (value))
-    return value
-
-
-def tag_string_convert(key, data, errors, context):
-    '''Takes a list of tags that is a comma-separated string (in data[key])
-    and parses tag names. These are added to the data dict, enumerated. They
-    are also validated.'''
-    if isinstance(data[key], str):
-        tags = [tag.strip()
-                for tag in data[key].split(',')
-                if tag.strip()]
-    else:
-        tags = data[key]
-
-    current_index = max([int(k[1]) for k in list(data.keys())
-                         if len(k) == 3 and k[0] == 'tags'] + [-1])
-
-    for num, tag in zip(count(current_index+1), tags):
-        data[('tags', num, 'name')] = tag
-
-    for tag in tags:
-        toolkit.get_validator('tag_length_validator')(tag, context)
-        tag_name_validator(tag, context)
-
-
-def get_validators():
-    return {
-        'known_spatial_uri': known_spatial_uri,
-        'odsh_tag_name_validator': tag_name_validator,
-        'odsh_validate_extras': validate_extras,
-        'validate_licenseAttributionByText': validate_licenseAttributionByText
-    }