diff --git a/pom.xml b/pom.xml
index 555f5057abcf2f45982a131696f79dbf69672a55..8fc7d1fd967d1c09c026830d80d888ecccdafee9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -11,7 +11,7 @@
 
     <groupId>de.landsh.opendata</groupId>
     <artifactId>dcat-catalog-proxy</artifactId>
-    <version>0.1-SNAPSHOT</version>
+    <version>1.1</version>
     <name>dcat-catalog-proxy</name>
     <description>DCAT catalog proxy</description>
 
diff --git a/src/main/java/de/landsh/opendata/catalogproxy/CatalogFilter.java b/src/main/java/de/landsh/opendata/catalogproxy/CatalogFilter.java
index 67e90bbc68d17b21c08b7884361e1dff64767e6f..de99c93abbf5237fef48d568947dc8c54b337934 100644
--- a/src/main/java/de/landsh/opendata/catalogproxy/CatalogFilter.java
+++ b/src/main/java/de/landsh/opendata/catalogproxy/CatalogFilter.java
@@ -43,11 +43,11 @@ public class CatalogFilter implements InitializingBean {
         this.baseURL = baseURL;
     }
 
-    Model work(InputStream in) {
+    Model work(InputStream inputStream) {
         final Model model = ModelFactory.createDefaultModel();
 
         RDFParser.create()
-                .source(in)
+                .source(new FilterInvalidRDF(inputStream))
                 .lang(RDFLanguages.RDFXML)
                 .errorHandler(ErrorHandlerFactory.errorHandlerStrict)
                 .base(baseURL)
@@ -57,7 +57,7 @@ public class CatalogFilter implements InitializingBean {
 
         final ResIterator it = model.listSubjectsWithProperty(RDF.type, DCAT.Dataset);
         while (it.hasNext()) {
-            Resource dataset = it.next();
+            final Resource dataset = it.next();
             if (hasAtLeastOneValidDistribution(dataset) || isCollection(dataset)) {
                 usedDistributionIds.addAll(getDistributionsForDataset(dataset));
             } else {
@@ -268,13 +268,14 @@ public class CatalogFilter implements InitializingBean {
 
         while (it.hasNext()) {
             final Statement next = it.next();
-
             final Resource distribution = next.getObject().asResource();
-            final RDFNode format = distribution.getProperty(DCTerms.format).getObject();
-            if (!UNWANTED_FORMATS.contains(format)) {
-                atLeastOneValidFormat = true;
+            final Statement formatStatement = distribution.getProperty(DCTerms.format);
+            if (formatStatement != null) {
+                final RDFNode format = formatStatement.getObject();
+                if (!UNWANTED_FORMATS.contains(format)) {
+                    atLeastOneValidFormat = true;
+                }
             }
-
         }
 
         return atLeastOneValidFormat;
diff --git a/src/main/java/de/landsh/opendata/catalogproxy/FilterInvalidRDF.java b/src/main/java/de/landsh/opendata/catalogproxy/FilterInvalidRDF.java
new file mode 100644
index 0000000000000000000000000000000000000000..3507515d635e90dc1a69d4e7143d0fb74c66f894
--- /dev/null
+++ b/src/main/java/de/landsh/opendata/catalogproxy/FilterInvalidRDF.java
@@ -0,0 +1,127 @@
+package de.landsh.opendata.catalogproxy;
+
+import org.apache.commons.lang3.StringUtils;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+
+/**
+ * An {@link InputStream} decorator that repairs invalid RDF/XML on the fly, line by line:
+ * IRIs containing spaces and unescaped ampersand characters, as produced by CKAN.
+ */
+public class FilterInvalidRDF extends InputStream {
+
+    private static final String RDF_ABOUT_DOUBLE_QUOTE = "rdf:about=\"";
+    private static final String RDF_ABOUT_SINGLE_QUOTE = "rdf:about='";
+    private final BufferedReader reader;
+    // true once the first line has been fetched from the underlying stream
+    private boolean initialized = false;
+    // UTF-8 bytes of the current, already filtered line; null at end of stream
+    private byte[] currentLine = null;
+    // read position inside currentLine; length+1 means the synthetic newline was emitted
+    private int index = 0;
+
+    public FilterInvalidRDF(InputStream inputStream) {
+        reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
+    }
+
+    /**
+     * Replace invalid information, such as IRIs with spaces and unescaped ampersand characters.
+     *
+     * @param line one line of text, may be {@code null}
+     * @return the repaired line, or {@code null} if {@code line} was {@code null}
+     */
+    static String filterLine(String line) {
+        if (line == null) return null;
+
+        if (line.contains("&")) {
+            // Limit -1 keeps trailing empty fragments, so a line ending in a bare "&" is escaped, too.
+            final String[] fragments = line.split("&", -1);
+            for (int i = 1; i < fragments.length; i++) {
+                final String fragment = fragments[i];
+                if (!(fragment.startsWith("#") || fragment.startsWith("amp") || fragment.startsWith("apos")
+                        || fragment.startsWith("quot") || fragment.startsWith("lt") || fragment.startsWith("gt"))) {
+                    // invalid character entity reference - escape the bare ampersand
+                    fragments[i] = "amp;" + fragment;
+                }
+            }
+            line = StringUtils.join(fragments, "&");
+        }
+
+        String before = null;
+        String iri = null;
+        String after = null;
+        if (line.contains(RDF_ABOUT_DOUBLE_QUOTE)) {
+            before = StringUtils.substringBefore(line, RDF_ABOUT_DOUBLE_QUOTE) + RDF_ABOUT_DOUBLE_QUOTE;
+            iri = StringUtils.substringBetween(line, RDF_ABOUT_DOUBLE_QUOTE, "\"");
+            after = "\"" + StringUtils.substringAfter(StringUtils.substringAfter(line, RDF_ABOUT_DOUBLE_QUOTE), "\"");
+        } else if (line.contains(RDF_ABOUT_SINGLE_QUOTE)) {
+            before = StringUtils.substringBefore(line, RDF_ABOUT_SINGLE_QUOTE) + RDF_ABOUT_SINGLE_QUOTE;
+            iri = StringUtils.substringBetween(line, RDF_ABOUT_SINGLE_QUOTE, "'");
+            after = "'" + StringUtils.substringAfter(StringUtils.substringAfter(line, RDF_ABOUT_SINGLE_QUOTE), "'");
+        }
+
+        if (iri == null) {
+            return line;
+        } else {
+            // spaces are not allowed in IRIs - percent-encode them
+            final String fixedIRI = iri.replaceAll(" ", "%20");
+            return before + fixedIRI + after;
+        }
+    }
+
+    /**
+     * Fetch the next line from the underlying stream and run it through {@link #filterLine}.
+     */
+    private void readNextLine() throws IOException {
+        final String line = reader.readLine();
+        if (line == null) {
+            currentLine = null;
+        } else {
+            currentLine = filterLine(line).getBytes(StandardCharsets.UTF_8);
+        }
+
+        index = 0;
+    }
+
+    @Override
+    public int read() throws IOException {
+        if (!initialized) {
+            initialized = true;
+            readNextLine();
+        }
+
+        if (currentLine == null) {
+            return -1;
+        }
+
+        // insert a newline character at the end of each line
+        if (index == currentLine.length) {
+            index++;
+            return '\n';
+        }
+
+        if (index > currentLine.length) {
+            readNextLine();
+        }
+
+        if (currentLine == null) {
+            return -1;
+        }
+
+        // mask with 0xFF: InputStream.read() must return 0..255, never a negative byte value
+        return currentLine[index++] & 0xFF;
+    }
+
+    /**
+     * Close the underlying stream.
+     */
+    @Override
+    public void close() throws IOException {
+        reader.close();
+    }
+}
diff --git a/src/test/java/de/landsh/opendata/catalogproxy/CatalogFilterTest.java b/src/test/java/de/landsh/opendata/catalogproxy/CatalogFilterTest.java
index 3578986be7d43acc5a543bde77c56a902b059ad2..3c8b21f13b1c32cd666fd4c36dc3121073080ff5 100644
--- a/src/test/java/de/landsh/opendata/catalogproxy/CatalogFilterTest.java
+++ b/src/test/java/de/landsh/opendata/catalogproxy/CatalogFilterTest.java
@@ -203,4 +203,16 @@ public class CatalogFilterTest {
 
         inputStream.close();
     }
+
+    /**
+     * CKAN produces catalog.xml documents with invalid IRIs and invalid XML content (ampersand not escaped). The
+     * catalog proxy must be able to cope with this.
+     */
+    @Test
+    public void work_invalid_iri() {
+        final InputStream inputStream = getClass().getResourceAsStream("/invalid_iri.xml");
+        final Model model = catalogFilter.work(inputStream);
+        // the parser must not throw on the broken input and must yield a model
+        org.junit.jupiter.api.Assertions.assertNotNull(model);
+    }
 }
diff --git a/src/test/java/de/landsh/opendata/catalogproxy/FilterInvalidRDFTest.java b/src/test/java/de/landsh/opendata/catalogproxy/FilterInvalidRDFTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..f188d81d1eb382da80dd3f0f1c72481afb5dc574
--- /dev/null
+++ b/src/test/java/de/landsh/opendata/catalogproxy/FilterInvalidRDFTest.java
@@ -0,0 +1,64 @@
+package de.landsh.opendata.catalogproxy;
+
+import org.apache.commons.io.IOUtils;
+import org.junit.jupiter.api.Test;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+
+public class FilterInvalidRDFTest {
+    @Test
+    public void read_unmodified() throws IOException {
+        String expectedResult = IOUtils.toString(getClass().getResourceAsStream("/catalog.xml"), StandardCharsets.UTF_8);
+
+        InputStream inputStream = new FilterInvalidRDF(getClass().getResourceAsStream("/catalog.xml"));
+        String result = IOUtils.toString(inputStream, StandardCharsets.UTF_8);
+
+        assertEquals(expectedResult, result);
+    }
+
+    @Test
+    public void read_iri_with_space() throws IOException {
+        final String invalidIRI = "\"https://opendata.schleswig-holstein.de/dataset/automatische-zahlstelle-neustadt i. h.-süd-2012\"";
+        final String correctedIRI = "\"https://opendata.schleswig-holstein.de/dataset/automatische-zahlstelle-neustadt%20i.%20h.-süd-2012\"";
+
+        final String invalidURL = "https://www.bast.de/DE/Verkehrstechnik/Fachthemen/v2-verkehrszaehlung/Aktuell/zaehl_aktuell_node.html?nn=1819516&cms_detail=1105&cms_map=0";
+        final String correctedURL = "https://www.bast.de/DE/Verkehrstechnik/Fachthemen/v2-verkehrszaehlung/Aktuell/zaehl_aktuell_node.html?nn=1819516&amp;cms_detail=1105&amp;cms_map=0";
+
+        String expectedResult = IOUtils.toString(getClass().getResourceAsStream("/invalid_iri.xml"), StandardCharsets.UTF_8)
+                .replace(invalidIRI,correctedIRI)
+                .replace(invalidURL, correctedURL);
+
+        InputStream inputStream = new FilterInvalidRDF(getClass().getResourceAsStream("/invalid_iri.xml"));
+        String result = IOUtils.toString(inputStream, StandardCharsets.UTF_8);
+
+        assertEquals(expectedResult, result);
+    }
+
+    @Test
+    public void filterLine_unmodified() {
+        // edge cases
+        assertEquals("", FilterInvalidRDF.filterLine(""));
+        assertNull(FilterInvalidRDF.filterLine(null));
+
+        // unmodified
+        assertEquals("abc&quot;def", FilterInvalidRDF.filterLine("abc&quot;def"));
+        assertEquals("&quot;", FilterInvalidRDF.filterLine("&quot;"));
+        assertEquals(" &quot; ", FilterInvalidRDF.filterLine(" &quot; "));
+        assertEquals(" &amp; ", FilterInvalidRDF.filterLine(" &amp; "));
+        assertEquals(" &#20; ", FilterInvalidRDF.filterLine(" &#20; "));
+        assertEquals(" &apos; ", FilterInvalidRDF.filterLine(" &apos; "));
+        assertEquals(" &lt; ", FilterInvalidRDF.filterLine(" &lt; "));
+        assertEquals(" &gt; ", FilterInvalidRDF.filterLine(" &gt; "));
+    }
+
+    @Test
+    public void filterLine_invalid_xml_entity() {
+        assertEquals("?nn=1819516&amp;cms_detail=1105&amp;cms_map=0", FilterInvalidRDF.filterLine("?nn=1819516&cms_detail=1105&cms_map=0"));
+
+    }
+}
diff --git a/src/test/resources/invalid_iri.xml b/src/test/resources/invalid_iri.xml
new file mode 100644
index 0000000000000000000000000000000000000000..297e96d9a5f346adaadba3c84c265767eadaf5a9
--- /dev/null
+++ b/src/test/resources/invalid_iri.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="utf-8"?>
+<rdf:RDF
+  xmlns:foaf="http://xmlns.com/foaf/0.1/"
+  xmlns:locn="http://www.w3.org/ns/locn#"
+  xmlns:hydra="http://www.w3.org/ns/hydra/core#"
+  xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+  xmlns:dcat="http://www.w3.org/ns/dcat#"
+  xmlns:dct="http://purl.org/dc/terms/"
+  xmlns:schema="http://schema.org/"
+  xmlns:skos="http://www.w3.org/2004/02/skos/core#"
+  xmlns:vcard="http://www.w3.org/2006/vcard/ns#"
+  xmlns:dcatde="http://dcat-ap.de/def/dcatde/1.0.1/"
+>
+  <dcat:Catalog rdf:about="http://opendata.schleswig-holstein.de">
+  
+<dcat:dataset>
+<dcat:Dataset rdf:about="https://opendata.schleswig-holstein.de/dataset/automatische-zahlstelle-neustadt i. h.-süd-2012">
+<dct:temporal>
+<dct:PeriodOfTime rdf:nodeID="N773ff85828024b228e9181e2ae5ef643">
+<schema:endDate rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2012-12-31T00:00:00</schema:endDate>
+<schema:startDate rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2012-01-01T00:00:00</schema:startDate>
+</dct:PeriodOfTime>
+</dct:temporal>
+<dcatde:contributorID rdf:resource="http://dcat-ap.de/def/contributors/schleswigHolstein"/>
+<dct:publisher rdf:resource="https://opendata.schleswig-holstein.de/organization/1982b7c4-fb92-42aa-a25a-6ddb492d7e5d"/>
+<dcat:keyword>LKW</dcat:keyword>
+<dcat:keyword>A1</dcat:keyword>
+<dcat:keyword>Autobahn</dcat:keyword>
+<dcat:distribution rdf:resource="https://www.bast.de/videos/2012/zst1105.zip"/>
+<dcatde:licenseAttributionByText>Bundesanstalt für Straßenwesen (BASt)</dcatde:licenseAttributionByText>
+<dct:modified rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2021-12-02T00:00:00</dct:modified>
+<dct:title>Automatische Zählstelle Neustadt i. H.-Süd 2012</dct:title>
+<dcat:keyword>Verkehrszählung</dcat:keyword>
+<dcat:keyword>PKW</dcat:keyword>
+<dct:license rdf:resource="http://dcat-ap.de/def/licenses/dl-by-de/2.0"/>
+<dct:issued rdf:datatype="http://www.w3.org/2001/XMLSchema#dateTime">2021-12-02T00:00:00</dct:issued>
+<dct:spatial>
+<dct:Location rdf:nodeID="Nc211cc07fede4137b843b6505ba8a3a2">
+<locn:geometry rdf:datatype="https://www.iana.org/assignments/media-types/application/vnd.geo+json">
+{"type": "Point", "coordinates": [10.78103, 54.10379]}
+</locn:geometry>
+<locn:geometry rdf:datatype="http://www.opengis.net/ont/geosparql#wktLiteral">POINT (10.7810 54.1038)</locn:geometry>
+</dct:Location>
+</dct:spatial>
+<dcat:theme rdf:resource="http://publications.europa.eu/resource/authority/data-theme/TRAN"/>
+<dct:identifier>
+https://opendata.schleswig-holstein.de/dataset/automatische-zahlstelle-neustadt i. h.-süd-2012
+</dct:identifier>
+<dct:language rdf:resource="http://publications.europa.eu/resource/authority/language/DEU"/>
+<dct:description>
+Auf Deutschlands Autobahnen und außerörtlichen Bundesstraßen werden an automatischen Zählstellen alle Fahrzeuge permanent gezählt. Je nach eingesetztem Gerätetyp können bis zu neun Fahrzeugarten unterschieden werden. Die Daten werden von den Bundesländern erhoben und der BASt quartalsweise übermittelt. Auf dieser Basis werden Jahresfahrleistungen und durchschnittliche tägliche Verkehrsstärken (DTV) berechnet. Die Daten bilden eine wichtige Grundlage für verkehrs- oder bautechnische Entscheidungen und Maßnahmen Informationen zu den verwendeten Erfassungsarten und Abkürzungen: - [Erfassungsarten nach TLS (PDF)](https://www.bast.de/BASt_2017/DE/Verkehrstechnik/Fachthemen/v2-verkehrszaehlung/pdf-dateien/erfassungsarten.pdf?__blob=publicationFile) - [Abkürzungen und CSV-Datensatzbeschreibung (PDF)](https://www.bast.de/BASt_2017/DE/Verkehrstechnik/Fachthemen/v2-verkehrszaehlung/pdf-dateien/abkuerzungen.pdf?__blob=publicationFile) [Informationsseite zur Zählstelle](https://www.bast.de/DE/Verkehrstechnik/Fachthemen/v2-verkehrszaehlung/Aktuell/zaehl_aktuell_node.html?nn=1819516&cms_detail=1105&cms_map=0)
+</dct:description>
+</dcat:Dataset>
+</dcat:dataset>
+</dcat:Catalog>
+</rdf:RDF>