diff --git a/README_SETUP.md b/README_SETUP.md
index 0ac69cf928ae0267e60a8a5ec576b5117236e24c..e5ebd969462f7d2c28a329e2c6b6e1bab1252775 100644
--- a/README_SETUP.md
+++ b/README_SETUP.md
@@ -39,9 +39,11 @@ Optional h5-crawler:
    extroot. E.g. `sudo mount -o bind extroot
    ../../caosdb-deploy/profiles/empty/paths/extroot` (or whatever path
    the extroot of the empty profile to be used is located at).
-3. Start an empty (!) CaosDB instance (with the mounted extroot). The
-   database will be cleared during testing, so it's important to use
+3. Start (or restart) an empty (!) CaosDB instance (with the mounted extroot).
+   The database will be cleared during testing, so it's important to use
    an empty instance.
+   Make sure that the configuration of the Python caosdb module is correct
+   and allows you to connect to the server.
 4. Run `test.sh`.  Note that this may modify content of the `integrationtest/extroot/` directory.
 
 ## Code Formatting
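
To make the connection check in step 3 concrete, here is a minimal pre-flight
sketch, assuming an already configured `pycaosdb.ini` that points at the empty
test instance (the query is only illustrative):

```python
# Pre-flight check before running test.sh: the query forces a connection, so a
# failure here means the python caosdb configuration cannot reach the server;
# the assertion guards against running the tests on a non-empty instance.
import caosdb as db

records = db.execute_query("FIND Record")
assert len(records) == 0, "The CaosDB instance is not empty!"
```
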
diff --git a/integrationtests/test.sh b/integrationtests/test.sh
index 5efd549551670d3a4581380271ac2aba4b80a10f..71af543643a35cb082f10a24440c5ea87df946c9 100755
--- a/integrationtests/test.sh
+++ b/integrationtests/test.sh
@@ -34,11 +34,11 @@ echo "Filling the database"
 echo "Testing the crawler database"
 python3 -m pytest test_crawler_with_cfoods.py
 echo "make a change"
-pushd extroot
+cd extroot
 egrep -liRZ 'A description of another example' . | xargs -0 -l sed -i -e 's/A description of another example/A description of this example/g'
 # remove a file to check that this does not lead to a crawler crash
 mv DataAnalysis/2010_TestProject/2019-02-03_something/README.xlsx DataAnalysis/2010_TestProject/2019-02-03_something/README.xlsx_back
-popd
+cd ..
 echo "run crawler"
 ./crawl.py  / | tee $OUT
 # rename the moved file
@@ -58,9 +58,9 @@ then
 fi
 set -e
 echo "undo changes"
-pushd extroot
+cd extroot
 egrep -liRZ 'A description of this example' . | xargs -0 -l sed -i -e 's/A description of this example/A description of another example/g'
-popd
+cd ..
 python3 test_table.py
 # TODO the following test deletes lots of the data inserted by the crawler
 echo "Testing im and export"
diff --git a/integrationtests/test_im_und_export.py b/integrationtests/test_im_und_export.py
index db26249b14d3d547db8dcea4e49de2aa07479e5b..27995080aa5cbeeb6f562226d4f0c0ca19c64d83 100644
--- a/integrationtests/test_im_und_export.py
+++ b/integrationtests/test_im_und_export.py
@@ -3,15 +3,14 @@ import os
 from tempfile import TemporaryDirectory
 
 import caosdb as db
-
-from caosadvancedtools.export_related import export
+from caosadvancedtools.export_related import export_related_to
 from caosadvancedtools.import_from_xml import import_xml
 
 if __name__ == "__main__":
     print("Conducting im- and export tests")
     rec = db.execute_query("FIND 2019-02-03_really_cool_finding", unique=True)
     directory = TemporaryDirectory()
-    export(rec.id, directory=directory.name)
+    export_related_to(rec.id, directory=directory.name)
     # delete everything
     recs = db.execute_query("FIND entity with id>99")
     recs.delete()
diff --git a/src/caosadvancedtools/collect_datamodel.py b/src/caosadvancedtools/collect_datamodel.py
index 1ca68068e713dd34ebc3368ad760461578dee4ef..806d15333cac7f745ce2fb82a02e0214ad2b6616 100644
--- a/src/caosadvancedtools/collect_datamodel.py
+++ b/src/caosadvancedtools/collect_datamodel.py
@@ -26,14 +26,19 @@ import argparse
 import os
 
 import caosdb as db
+from caosdb.apiutils import retrieve_entities_with_ids
+
+from caosadvancedtools.export_related import export
 
 
 def get_dm():
-    rts = set([r.name for r in db.execute_query("SELECT name FROM RECORDTYPE")])
+    # keep (id, name) pairs and drop unnamed entities right away
+    rts = set([(r.id, r.name) for r
+               in db.execute_query("SELECT name FROM RECORDTYPE")
+               if r.name is not None])
 
-    if None in rts:
-        rts.remove(None)
-    ps = set([r.name for r in db.execute_query("SELECT name FROM PROPERTY")])
+    ps = set([(r.id, r.name) for r
+              in db.execute_query("SELECT name FROM PROPERTY")
+              if r.name is not None])
 
-    if None in ps:
-        ps.remove(None)
@@ -47,18 +52,26 @@ def get_parser():
                    "be stored")
     p.add_argument("-c", "--compare", help="directory where the datamodel that"
                    " shall be compared is stored")
+    p.add_argument("-x", "--xml", action="store_true",
+                   help="store xml as well")
 
     return p
 
 
-def store(directory):
+def store(directory, xml=False):
     rts, ps = get_dm()
 
     os.makedirs(directory, exist_ok=True)
     with open(os.path.join(directory, "recordtypes.txt"), "w") as fi:
-        fi.write(",".join(rts))
+        fi.write(",".join([el[1] for el in rts]))
     with open(os.path.join(directory, "properties.txt"), "w") as fi:
-        fi.write(",".join(ps))
+        fi.write(",".join([el[1] for el in ps]))
+
+    if xml:
+        cont = retrieve_entities_with_ids(
+            [el[0] for el in rts] + [el[0] for el in ps])
+
+        export(cont, directory)
 
 
 def load_dm(directory):
@@ -104,7 +117,7 @@ if __name__ == "__main__":
     args = p.parse_args()
 
     if args.store:
-        store(args.store)
+        store(args.store, xml=args.xml)
 
     if args.compare:
         compare(args.compare)
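
For orientation, a hypothetical usage sketch of the extended `store` function
(the target directory name is made up; `xml=True` corresponds to the new
`-x`/`--xml` flag):

```python
# Hypothetical usage of the extended datamodel snapshot; the directory name is
# made up for illustration.
from caosadvancedtools.collect_datamodel import store

# Writes recordtypes.txt and properties.txt as before; with xml=True the
# record types and properties are additionally retrieved by id and written
# out via export_related.export.
store("datamodel_snapshot", xml=True)
```
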
diff --git a/src/caosadvancedtools/crawler.py b/src/caosadvancedtools/crawler.py
index 33a732e89b179b9580b914e4c640baec3d307ce8..5a8d428655791169557f5c292d30698f6ad69798 100644
--- a/src/caosadvancedtools/crawler.py
+++ b/src/caosadvancedtools/crawler.py
@@ -56,6 +56,7 @@ from .datainconsistency import DataInconsistencyError
 from .datamodel_problems import DataModelProblems
 from .guard import RETRIEVE, ProhibitedException
 from .guard import global_guard as guard
+from .serverside.helper import send_mail as main_send_mail
 from .suppressKnown import SuppressKnown
 
 logger = logging.getLogger(__name__)
@@ -500,7 +501,6 @@ carefully and if the changes are ok, click on the following link:
         """.format(url=caosdb_config["Connection"]["url"],
                    filename=filename,
                    changes="\n".join(changes))
-        sendmail = caosdb_config["Misc"]["sendmail"]
         try:
             fro = caosdb_config["advancedtools"]["crawler.from_mail"]
             to = caosdb_config["advancedtools"]["crawler.to_mail"]
@@ -510,8 +510,11 @@ carefully and if the changes are ok, click on the following link:
                          "'from_mail' and 'to_mail'.")
             return
 
-        p = subprocess.Popen([sendmail, "-f", fro, to], stdin=subprocess.PIPE)
-        p.communicate(input=text.encode())
+        main_send_mail(
+            from_addr=fro,
+            to=to,
+            subject="Crawler Update",
+            body=text)
 
     def push_identifiables_to_CaosDB(self, cfood):
         """
diff --git a/src/caosadvancedtools/export_related.py b/src/caosadvancedtools/export_related.py
index 00f440d28a2ae1da14132083e4b8d3c5003d1b65..69b588c34cc7c8123ab4291f6d8f76f06e7400be 100755
--- a/src/caosadvancedtools/export_related.py
+++ b/src/caosadvancedtools/export_related.py
@@ -96,12 +96,15 @@ def invert_ids(entities):
     apply_to_ids(entities, lambda x: x*-1)
 
 
-def export(rec_id, directory="."):
+def export_related_to(rec_id, directory="."):
+    """Export the record with id rec_id and all related entities."""
     if not isinstance(rec_id, int):
         raise ValueError("rec_id needs to be an integer")
     ent = db.execute_query("FIND {}".format(rec_id), unique=True)
     cont = recursively_collect_related(ent)
+    export(cont, directory=directory)
 
+
+def export(cont, directory="."):
+    """Export the entities in `cont` to an XML file in `directory`.
+
+    File entities are downloaded into a "downloads" subdirectory.
+    """
     directory = os.path.abspath(directory)
     dl_dir = os.path.join(directory, "downloads")
 
@@ -119,6 +122,9 @@ def export(rec_id, directory="."):
                 print("Failed download of:", target)
 
     invert_ids(cont)
+
+    for el in cont:
+        el.version = None
     xml = etree.tounicode(cont.to_xml(
         local_serialization=True), pretty_print=True)
 
@@ -147,4 +153,4 @@ if __name__ == "__main__":
     parser = defineParser()
     args = parser.parse_args()
 
-    export(args.id, directory=args.directory)
+    export_related_to(args.id, directory=args.directory)
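
The split leaves two entry points; a short sketch of how they relate (ids,
query, and target paths are illustrative only):

```python
import caosdb as db
from caosadvancedtools.export_related import export, export_related_to

# High level: collect everything related to one record and export it
# (illustrative record id and path).
export_related_to(1234, directory="/tmp/export_finding")

# Low level: export an explicitly assembled container, as
# collect_datamodel.store(..., xml=True) now does for the datamodel entities.
cont = db.execute_query("FIND RecordType")
export(cont, directory="/tmp/export_datamodel")
```
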
diff --git a/src/caosadvancedtools/models/parser.py b/src/caosadvancedtools/models/parser.py
index 5e1532e03690e753b8926b87b01db4e3a89f2c4c..5b17ab4a191c4bb797c0c79d126094986e0491c4 100644
--- a/src/caosadvancedtools/models/parser.py
+++ b/src/caosadvancedtools/models/parser.py
@@ -316,19 +316,22 @@ class Parser(object):
                     self.model[name].description = prop
 
                 elif prop_name == "recommended_properties":
-                    self._add_to_recordtype(name, prop, importance=db.RECOMMENDED)
+                    self._add_to_recordtype(
+                        name, prop, importance=db.RECOMMENDED)
 
                     for n, e in prop.items():
                         self._treat_entity(n, e)
 
                 elif prop_name == "obligatory_properties":
-                    self._add_to_recordtype(name, prop, importance=db.OBLIGATORY)
+                    self._add_to_recordtype(
+                        name, prop, importance=db.OBLIGATORY)
 
                     for n, e in prop.items():
                         self._treat_entity(n, e)
 
                 elif prop_name == "suggested_properties":
-                    self._add_to_recordtype(name, prop, importance=db.SUGGESTED)
+                    self._add_to_recordtype(
+                        name, prop, importance=db.SUGGESTED)
 
                     for n, e in prop.items():
                         self._treat_entity(n, e)
@@ -355,9 +358,13 @@ class Parser(object):
         self.treated.append(name)
 
     def _check_datatypes(self):
-        """ checks if datatype is valid.
-        datatype of properties is simply initialized with string. Here over
-        properties is iterated and datatype is corrected. """
+        """Checks if the datatype is valid. 
+
+        Iterate over all properties in this datamodel and choose a
+        valid datatype from the original datatype string in the yaml
+        file. Raise a ValueError if no datatype can be found.
+
+        """
 
         for key, value in self.model.items():
             if isinstance(value, db.Property):
@@ -368,7 +375,8 @@ class Parser(object):
                     try:
                         value.datatype = db.__getattribute__(value.datatype)
                     except AttributeError:
-                        raise ValueError("Unknown Datatype.")
+                        raise ValueError(
+                            "Property {} has an unknown datatype: {}".format(
+                                value.name, value.datatype))
 
     def _set_recordtypes(self):
         """ properties are defined in first iteration; set remaining as RTs """