diff --git a/.docker/Dockerfile b/.docker/Dockerfile
index b300a1a97aa22b3eafc91ef89c01bbd7111edd62..f7353e059d8cd027f08403d6f6527ffbcaabc965 100644
--- a/.docker/Dockerfile
+++ b/.docker/Dockerfile
@@ -10,14 +10,16 @@ RUN apt-get update && \
     tox \
     -y
 COPY .docker/wait-for-it.sh /wait-for-it.sh
+ARG PYLIB
 ADD https://gitlab.indiscale.com/api/v4/projects/97/repository/commits/${PYLIB} \
     pylib_version.json
 RUN git clone https://gitlab.indiscale.com/caosdb/src/caosdb-pylib.git && \
     cd caosdb-pylib && git checkout ${PYLIB} && pip3 install .
+ARG ADVANCED
 ADD https://gitlab.indiscale.com/api/v4/projects/104/repository/commits/${ADVANCED} \
     advanced_version.json
 RUN git clone https://gitlab.indiscale.com/caosdb/src/caosdb-advanced-user-tools.git && \
-    cd caosdb-advanced-user-tools && git checkout ${ADVANCED} && pip3 install .
+    cd caosdb-advanced-user-tools && git checkout ${ADVANCED} && pip3 install .[h5-crawler]
 COPY . /git
 
 # Delete .git because it is huge.
diff --git a/.docker/docker-compose.yml b/.docker/docker-compose.yml
index bbee24fbd8c898c479a0fafa13000ddf506d00eb..e5bb4c9b8ca6ad1750922cb07c92cd6c5eb77c6b 100644
--- a/.docker/docker-compose.yml
+++ b/.docker/docker-compose.yml
@@ -17,8 +17,8 @@ services:
       - type: bind
         source: ./cert
         target: /opt/caosdb/cert
-      - type: volume
-        source: extroot
+      - type: bind
+        source: "../integrationtests/test_data/extroot"
         target: /opt/caosdb/mnt/extroot
       - type: volume
         source: scripting
@@ -36,7 +36,6 @@ services:
       CAOSDB_CONFIG_TRANSACTION_BENCHMARK_ENABLED: "TRUE"
 volumes:
   scripting:
-  extroot:
   authtoken:
 networks:
   caosnet:
diff --git a/.gitignore b/.gitignore
index 11c17317428964b82b47d55399a4dde1a9e698a9..5599d7d263c8927025e128c37eabb185025bf96b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,3 +13,7 @@ provenance.yml
 *.jks
 *.tar.gz
 *.sql
+/integrationtests/test-profile/custom/other/cert/
+src/doc/_apidoc/
+start_caosdb_docker.sh
+src/doc/_apidoc
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a30140e684b465d40b964f1bfb9b97959b29834d..30a8cd8fe4c08fd3fe0f3f98aaa56b83cb623086 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -152,11 +152,11 @@ inttest:
       - CAOSDB_TAG=$CAOSDB_TAG docker-compose  up -d
 
         # Store versions of CaosDB parts
-      - docker exec -u 0 -t docker_caosdb-server_1 cat /opt/caosdb/git/caosdb_pylib_commit > hash_pylib
-      - docker exec -u 0 -t docker_caosdb-server_1 cat /opt/caosdb/git/caosdb_webui_commit > hash_webui
-      - docker exec -u 0 -t docker_caosdb-server_1 cat /opt/caosdb/git/caosdb_server_commit > hash_server
-      - docker exec -u 0 -t docker_caosdb-server_1 cat /opt/caosdb/git/caosdb_mysqlbackend_commit > hash_mysql
-      - docker exec -u 0 -t docker_caosdb-server_1 cat /opt/caosdb/git/caosdb_proto_commit > hash_proto
+      - docker exec -u 0 -t docker-caosdb-server-1 cat /opt/caosdb/git/caosdb_pylib_commit > hash_pylib
+      - docker exec -u 0 -t docker-caosdb-server-1 cat /opt/caosdb/git/caosdb_webui_commit > hash_webui
+      - docker exec -u 0 -t docker-caosdb-server-1 cat /opt/caosdb/git/caosdb_server_commit > hash_server
+      - docker exec -u 0 -t docker-caosdb-server-1 cat /opt/caosdb/git/caosdb_mysqlbackend_commit > hash_mysql
+      - docker exec -u 0 -t docker-caosdb-server-1 cat /opt/caosdb/git/caosdb_proto_commit > hash_proto
       - cat hash_server
       - cat hash_proto
       - cat hash_mysql
@@ -167,8 +167,8 @@ inttest:
       - /bin/sh ./run.sh
 
         # Save logs
-      - docker logs docker_caosdb-server_1 &> ../caosdb_log.txt
-      - docker logs docker_sqldb_1 &> ../mariadb_log.txt
+      - docker logs docker-caosdb-server-1 &> ../caosdb_log.txt
+      - docker logs docker-sqldb-1 &> ../mariadb_log.txt
       - cd ..
 
         # Stop the server
@@ -211,6 +211,17 @@ build-testenv:
       - PYLIB=${PYLIB:-dev}
       - echo $PYLIB
 
+      - if [ -z "$ADVANCED" ]; then
+          if echo "$CI_COMMIT_REF_NAME" | grep -c "^f-" ; then
+            echo "Check if advanced user tools have branch $CI_COMMIT_REF_NAME" ;
+            if wget https://gitlab.indiscale.com/api/v4/projects/104/repository/branches/${CI_COMMIT_REF_NAME} ; then
+              ADVANCED=$CI_COMMIT_REF_NAME ;
+            fi;
+          fi;
+        fi;
+      - ADVANCED=${ADVANCED:-dev}
+      - echo $ADVANCED
+
       - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
         # use here general latest or specific branch latest...
       - docker build 
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d0a2883005d6651f0ba3ef22b9fa5fe0d03349aa..8d80a17c3ad6321e115e523d1ca8082385e50b8b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,10 +10,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Added
 
 * Everything
+* Added new converters for tables: CSVTableConverter and XLSXTableConverter
+* Possibility to authorize updates as in the old crawler
+* Allow authorization of inserts
+* Allow splitting cfoods into multiple yaml documents
+* Implemented macros
+* Converters can now filter the list of children
+* You can now crawl data with name conflicts: `synchronize(unique_names=False)`
 
 ### Changed
 
-* Renamed module from `newcrawler` to `caoscrawler`
+* MAINT: Renamed module from `newcrawler` to `caoscrawler`
+* MAINT: Removed global converters from `crawl.py`
 
 ### Deprecated
 
@@ -21,4 +29,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ### Fixed
 
+* FIX: #12
+* FIX: #14
+* FIX: Variables are now also replaced when the value is given as a list.
+* FIX: #35 Parent cannot be set from value
+* [#6](https://gitlab.com/caosdb/caosdb-crawler/-/issues/6): Fixed many type
+  hints to be compatible to python 3.8
+* [#9](https://gitlab.com/caosdb/caosdb-crawler/-/issues/9): Scalars of types
+  different than string can now be given in cfood definitions
+
+
 ### Security
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..95fc2bf61473b94decfb43d0c5ba0d3fda535a07
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,48 @@
+# ** header v3.0
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2020 IndiScale GmbH <info@indiscale.com>
+# Copyright (C) 2020 Daniel Hornung <d.hornung@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ** end header
+
+# This Makefile is a wrapper for several other scripts.
+
+.PHONY: help
+
+help:
+	@echo 'Type `make doc` for documentation, or `make install` for (local) installation.'
+
+doc:
+	$(MAKE) -C src/doc html
+
+install:
+	@echo "Not implemented yet, use pip for installation."
+
+check: style lint
+.PHONY: check
+
+style:
+	pycodestyle --count src unittests
+.PHONY: style
+
+lint:
+	pylint --unsafe-load-any-extension=y -d all -e E,F src/caoscrawler
+.PHONY: lint
+
+unittest:
+	tox -r
+.PHONY: unittest
diff --git a/README.md b/README.md
index 59b88aaa36ed97d8c2cc9e4474820e3dad4a478b..8576e5c969556005fdeb346ef2cdfadf1b7fc266 100644
--- a/README.md
+++ b/README.md
@@ -25,6 +25,8 @@ After installation of the package run (within the project folder):
 pytest
 ```
 
+## Integration Tests
+see `integrationtests/README.md`
 
 # Contributers
 
diff --git a/integrationtests/README.md b/integrationtests/README.md
index 96789ed9f02036a0c7cc25ca1a60d9f0042a5557..88d55902e3fdc5836baefd97c3192cc9ff01e7bd 100644
--- a/integrationtests/README.md
+++ b/integrationtests/README.md
@@ -1,2 +1,3 @@
 1. Mount test_data/extroot as extroot folder in the CaosDB server
 2. use an empty server
+3. run pytest from `src`: `python -m pytest ../integrationtests`
diff --git a/integrationtests/basic_example/test.py b/integrationtests/basic_example/test_basic.py
similarity index 76%
rename from integrationtests/basic_example/test.py
rename to integrationtests/basic_example/test_basic.py
index 6e35f7f2e4532acb5a2c3c80d06d9faeabd0fe0a..b24a1c658cfc9e23ca0ba2de266161864cb6b66c 100755
--- a/integrationtests/basic_example/test.py
+++ b/integrationtests/basic_example/test_basic.py
@@ -28,12 +28,13 @@
 module description
 """
 
+from caosadvancedtools.crawler import Crawler as OldCrawler
 import os
 from caosdb import EmptyUniqueQueryError
 import argparse
 import sys
 from argparse import RawTextHelpFormatter
-from caoscrawler import Crawler
+from caoscrawler import Crawler, SecurityMode
 import caosdb as db
 from caoscrawler.identifiable_adapters import CaosDBIdentifiableAdapter
 import pytest
@@ -41,8 +42,8 @@ from caosadvancedtools.models.parser import parse_model_from_yaml
 import yaml
 
 # TODO is not yet merged in caosadvancedtools
-from caosadvancedtools.testutils import clear_database, set_test_key
-set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
+#from caosadvancedtools.testutils import clear_database, set_test_key
+# set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
 
 
 def rfp(*pathcomponents):
@@ -53,6 +54,11 @@ def rfp(*pathcomponents):
     return os.path.join(os.path.dirname(__file__), *pathcomponents)
 
 
+@pytest.fixture
+def clear_database():
+    db.execute_query("FIND Entity").delete()
+
+
 @pytest.fixture
 def usemodel():
     model = parse_model_from_yaml(rfp("model.yml"))
@@ -86,8 +92,8 @@ def ident():
 def crawl_standard_test_directory(cr: Crawler,
                                   subdir: str = "examples_article",
                                   cfood: str = "scifolder_cfood.yml"):
-    cr.crawl_directory(rfp("..", "unittests", "test_directories", subdir),
-                       rfp("..", "unittests", cfood))
+    cr.crawl_directory(rfp("..", "..", "unittests", "test_directories", subdir),
+                       rfp("..", "..", "unittests", cfood))
 
 
 @pytest.fixture
@@ -102,15 +108,13 @@ def crawler_extended(ident):
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr, cfood="scifolder_extended.yml")
     # correct paths for current working directory
-    updateList = cr.updateList
-    fileList = [r for r in updateList if r.role == "File"]
-    for f in fileList:
-        f.file = rfp("..", "unittests", "test_directories",
-                     "examples_article", f.file)
+    file_list = [r for r in cr.target_data if r.role == "File"]
+    for f in file_list:
+        f.file = rfp("..", "..", "unittests", "test_directories", f.file)
     return cr
 
 
-def test_single_insertion(clear_database, usemodel, crawler):
+def test_single_insertion(clear_database, usemodel, crawler, ident):
     ins, ups = crawler.synchronize()
 
     # This test also generates the file records.xml used in some of the unittesets:
@@ -118,7 +122,7 @@ def test_single_insertion(clear_database, usemodel, crawler):
     for i in reversed(range(len(res))):
         if res[i].parents[0].name == "PyTestInfo":
             del res[i]
-    filename = rfp("..", "unittests", "records.xml")
+    filename = rfp("..", "..", "unittests", "records.xml")
     with open(filename, "w") as f:
         xml = res.to_xml()
         # Remove noscript and transaction benchmark:
@@ -131,10 +135,9 @@ def test_single_insertion(clear_database, usemodel, crawler):
     assert len(ups) == 0
 
     # Do a second run on the same data, there should be no changes:
-    crawler = Crawler(debug=True, identifiableAdapter=ident_adapt)
-    crawler.copy_attributes = Mock()
-    crawler.crawl_directory(rfp("../unittests/test_directories", "examples_article"),
-                            rfp("../unittests/scifolder_cfood.yml"))
+    crawler = Crawler(debug=True, identifiableAdapter=ident)
+    crawler.crawl_directory(rfp("../../unittests/test_directories", "examples_article"),
+                            rfp("../../unittests/scifolder_cfood.yml"))
     ins, ups = crawler.synchronize()
     assert len(ins) == 0
     assert len(ups) == 0
@@ -157,7 +160,7 @@ def test_insertion(clear_database, usemodel, ident, crawler):
     # Do a second run on the same data, there should a new insert:
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr, "example_insert")
-    assert len(cr.updateList) == 3
+    assert len(cr.target_data) == 3
     ins, ups = cr.synchronize()
     assert len(ins) == 1
     assert len(ups) == 0
@@ -165,7 +168,29 @@ def test_insertion(clear_database, usemodel, ident, crawler):
     # Do it again to check whether nothing is changed:
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr, "example_insert")
-    assert len(cr.updateList) == 3
+    assert len(cr.target_data) == 3
+    ins, ups = cr.synchronize()
+    assert len(ins) == 0
+    assert len(ups) == 0
+
+
+def test_insert_auth(clear_database, usemodel, ident, crawler):
+    ins, ups = crawler.synchronize()
+
+    # Do a second run on the same data, there should be a new insert:
+    cr = Crawler(debug=True, identifiableAdapter=ident, securityMode=SecurityMode.RETRIEVE)
+    crawl_standard_test_directory(cr, "example_insert")
+    assert len(cr.target_data) == 3
+    ins, ups = cr.synchronize()
+    assert len(ins) == 1
+    assert not ins[0].is_valid()
+    nins, nups = OldCrawler.update_authorized_changes(cr.run_id)
+    assert nins == 1
+
+    # Do it again to check whether nothing is changed:
+    cr = Crawler(debug=True, identifiableAdapter=ident)
+    crawl_standard_test_directory(cr, "example_insert")
+    assert len(cr.target_data) == 3
     ins, ups = cr.synchronize()
     assert len(ins) == 0
     assert len(ups) == 0
@@ -180,9 +205,9 @@ def test_insertion_and_update(clear_database, usemodel, ident, crawler):
 
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr, "example_overwrite_1")
-    # print(cr.updateList)
+    # print(cr.target_data)
     # cr.save_debug_data(rfp("provenance.yml"))
-    assert len(cr.updateList) == 3
+    assert len(cr.target_data) == 3
     ins, ups = cr.synchronize()
     assert len(ins) == 0
     assert len(ups) == 1
@@ -197,7 +222,7 @@ def test_identifiable_update(clear_database, usemodel, ident, crawler):
     crawl_standard_test_directory(cr)
 
     # Test the addition of a single property:
-    l = cr.updateList
+    l = cr.target_data
     for record in l:
         if (record.parents[0].name == "Measurement" and
                 record.get_property("date").value == "2020-01-03"):
@@ -213,7 +238,7 @@ def test_identifiable_update(clear_database, usemodel, ident, crawler):
     # Test the change within one property:
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr)
-    l = cr.updateList
+    l = cr.target_data
     for record in l:
         if (record.parents[0].name == "Measurement" and
                 record.get_property("date").value == "2020-01-03"):
@@ -227,7 +252,7 @@ def test_identifiable_update(clear_database, usemodel, ident, crawler):
     # Changing the date should result in a new insertion:
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr)
-    l = cr.updateList
+    l = cr.target_data
     for record in l:
         if (record.parents[0].name == "Measurement" and
                 record.get_property("date").value == "2020-01-03"):
@@ -244,24 +269,23 @@ def test_file_insertion_dry(clear_database, usemodel, ident):
     crawler_extended = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(
         crawler_extended, cfood="scifolder_extended.yml")
-    updateList = crawler_extended.updateList
-    fileList = [r for r in updateList if r.role == "File"]
-    assert len(fileList) == 11
+    file_list = [r for r in crawler_extended.target_data if r.role == "File"]
+    assert len(file_list) == 11
 
-    for f in fileList:
+    for f in file_list:
         assert f.path.endswith("README.md")
-        assert f.path == f.file
+        assert f.path[1:] == f.file
 
     ins, ups = crawler_extended.synchronize(commit_changes=False)
     assert len(ups) == 0
-    fileList_ins = [r for r in ins if r.role == "File"]
-    assert len(fileList_ins) == 11
+    file_list_ins = [r for r in ins if r.role == "File"]
+    assert len(file_list_ins) == 11
 
 
 def test_file_insertion(clear_database, usemodel, ident, crawler_extended):
     ins, ups = crawler_extended.synchronize(commit_changes=True)
-    fileList_ins = [r for r in ins if r.role == "File"]
-    assert len(fileList_ins) == 11
+    file_list_ins = [r for r in ins if r.role == "File"]
+    assert len(file_list_ins) == 11
 
     assert db.execute_query("COUNT File") > 0
 
@@ -276,16 +300,14 @@ def test_file_insertion(clear_database, usemodel, ident, crawler_extended):
 
 def test_file_update(clear_database, usemodel, ident, crawler_extended):
     ins1, ups1 = crawler_extended.synchronize(commit_changes=True)
-    fileList_ins = [r for r in ins1 if r.role == "File"]
+    file_list_ins = [r for r in ins1 if r.role == "File"]
 
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr, cfood="scifolder_extended.yml")
 
-    updateList = cr.updateList
-    fileList = [r for r in updateList if r.role == "File"]
-    for f in fileList:
-        f.file = rfp("..", "unittests", "test_directories",
-                     "examples_article", f.file)
+    file_list = [r for r in cr.target_data if r.role == "File"]
+    for f in file_list:
+        f.file = rfp("..", "..", "unittests", "test_directories", f.file)
     ins2, ups2 = cr.synchronize(commit_changes=True)
     assert len(ups1) == 0
     assert len(ups2) == 0
@@ -298,11 +320,9 @@ def test_file_update(clear_database, usemodel, ident, crawler_extended):
     cr2 = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr2, cfood="scifolder_extended2.yml")
 
-    updateList = cr2.updateList
-    fileList = [r for r in updateList if r.role == "File"]
-    for f in fileList:
-        f.file = rfp("..", "unittests", "test_directories",
-                     "examples_article", f.file)
+    file_list = [r for r in cr2.target_data if r.role == "File"]
+    for f in file_list:
+        f.file = rfp("..", "..", "unittests", "test_directories", f.file)
     ins3, ups3 = cr2.synchronize(commit_changes=True)
     assert len(ups3) == 11
 
@@ -313,4 +333,4 @@ def test_file_update(clear_database, usemodel, ident, crawler_extended):
     # TODO: Implement file update checks (based on checksum)
     # Add test with actual file update:
     # assert len(ins2) == 0
-    # assert len(ups2) == len(fileList_ins)
+    # assert len(ups2) == len(file_list_ins)
diff --git a/integrationtests/pycaosdb.ini b/integrationtests/pycaosdb.ini
new file mode 100644
index 0000000000000000000000000000000000000000..a4f429736c9b46c8987d05a02724725295f32081
--- /dev/null
+++ b/integrationtests/pycaosdb.ini
@@ -0,0 +1,29 @@
+[Connection]
+url=https://localhost:10443/
+username=admin
+debug=0
+#cacert=/home//CaosDB/caosdb-deploy/profiles/default/custom/other/cert/caosdb.cert.pem
+password_method=plain
+password=caosdb
+
+ssl_insecure=True
+timeout=5000
+[Container]
+debug=0
+
+#[Crawler]
+#oldprefix=/ExperimentalData/
+#newprefix=/home/professional/CaosDB/caosdb-advanced-user-tools/integrationtests/extroot/ExperimentalData
+#[IntegrationTests]
+#test_server_side_scripting.bin_dir=/home/professional/CaosDB/caosdb-pyinttest/resources
+
+[Misc]
+sendmail=sendmail_to_file
+#sendmail=/usr/local/bin/sendmail_to_file
+entity_loan.curator_mail_from=admin@indiscale.com
+entity_loan.curator_mail_to=admin@indiscale.com
+[sss_helper]
+external_uri = https://localhost:10443
+[advancedtools]
+crawler.from_mail=admin@indiscale.com
+crawler.to_mail=admin@indiscale.com
diff --git a/integrationtests/realworld_example/crawl.sh b/integrationtests/realworld_example/crawl.sh
deleted file mode 100755
index 55a2a331fe517a539e2dd937ac35605c72b496c9..0000000000000000000000000000000000000000
--- a/integrationtests/realworld_example/crawl.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-python -m caosadvancedtools.loadFiles /opt/caosdb/mnt/extroot/data
-python load_and_insert_json_models.py
-python test_dataset_crawler.py
diff --git a/integrationtests/realworld_example/load_and_insert_json_models.py b/integrationtests/realworld_example/load_and_insert_json_models.py
deleted file mode 100644
index 682fd9c77531e63ed18dd13417399ad0d18a8de2..0000000000000000000000000000000000000000
--- a/integrationtests/realworld_example/load_and_insert_json_models.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python3
-# encoding: utf-8
-#
-# This file is a part of the CaosDB Project.
-#
-# Copyright (C) 2022 Indiscale GmbH <info@indiscale.com>
-# Copyright (C) 2022 Henrik tom Wörden <h.tomwoerden@indiscale.com>
-# Copyright (C) 2022 Florian Spreckelsen <f.spreckelsen@indiscale.com>
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as
-# published by the Free Software Foundation, either version 3 of the
-# License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <https://www.gnu.org/licenses/>.
-#
-import sys
-
-from caosadvancedtools.models.parser import parse_model_from_json_schema, parse_model_from_yaml
-
-
-def main():
-    # First load dataspace data model
-    dataspace_definitions = parse_model_from_json_schema(
-        "schema/dataspace.schema.json")
-    dataspace_definitions.sync_data_model(noquestion=True)
-
-    # Then general dataset definitions
-    dataset_definitions = parse_model_from_json_schema(
-        "schema/dataset.schema.json")
-    dataset_definitions.sync_data_model(noquestion=True)
-
-    # Finally, add inheritances as defined in yaml
-    dataset_inherits = parse_model_from_yaml(
-        "schema/dataset-inheritance.yml")
-    dataset_inherits.sync_data_model(noquestion=True)
-
-
-if __name__ == "__main__":
-
-    sys.exit(main())
diff --git a/integrationtests/realworld_example/test_dataset_crawler.py b/integrationtests/realworld_example/test_dataset_crawler.py
deleted file mode 100644
index 8713f490399471dc324c542f5d0e96bfe161b60a..0000000000000000000000000000000000000000
--- a/integrationtests/realworld_example/test_dataset_crawler.py
+++ /dev/null
@@ -1,123 +0,0 @@
-#!/usr/bin/env python3
-# encoding: utf-8
-#
-# This file is a part of the CaosDB Project.
-#
-# Copyright (C) 2022 Indiscale GmbH <info@indiscale.com>
-# Copyright (C) 2022 Henrik tom Wörden <h.tomwoerden@indiscale.com>
-# Copyright (C) 2022 Florian Spreckelsen <f.spreckelsen@indiscale.com>
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as
-# published by the Free Software Foundation, either version 3 of the
-# License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <https://www.gnu.org/licenses/>.
-#
-
-"""
-module description
-"""
-import json
-import os
-
-import caosdb as db
-
-from caoscrawler.crawl import Crawler
-from caoscrawler.converters import JSONFileConverter, DictConverter
-from caoscrawler.identifiable_adapters import CaosDBIdentifiableAdapter
-from caoscrawler.structure_elements import File, JSONFile, Directory
-import pytest
-from caosadvancedtools.models.parser import parse_model_from_json_schema, parse_model_from_yaml
-
-#from caosadvancedtools.testutils import clear_database, set_test_key
-import sys
-
-# TODO is not yet merged in caosadvancedtools
-# set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
-
-
-def rfp(*pathcomponents):
-    """
-    Return full path.
-    Shorthand convenience function.
-    """
-    return os.path.join(os.path.dirname(__file__), *pathcomponents)
-
-
-DATADIR = rfp("..", "test_data", "extroot", "realworld_example")
-
-
-@pytest.fixture
-def usemodel():
-    # First load dataspace data model
-    dataspace_definitions = parse_model_from_json_schema(
-        os.path.join(DATADIR, "schema", "dataspace.schema.json"))
-    dataspace_definitions.sync_data_model(noquestion=True)
-
-    # Then general dataset definitions
-    dataset_definitions = parse_model_from_json_schema(
-        os.path.join(DATADIR, "schema", "dataset.schema.json"))
-    dataset_definitions.sync_data_model(noquestion=True)
-
-    # Finally, add inheritances as defined in yaml
-    dataset_inherits = parse_model_from_yaml(
-        os.path.join(DATADIR, "schema", "dataset-inheritance.yml"))
-    dataset_inherits.sync_data_model(noquestion=True)
-
-
-def test_dataset(
-        # clear_database,
-        usemodel):
-    # json_file_path = rfp("test_directories", "single_file_test_data", "testjson.json")
-
-    ident = CaosDBIdentifiableAdapter()
-    ident.register_identifiable(
-        "license", db.RecordType().add_parent("license").add_property("name"))
-    ident.register_identifiable("project_type", db.RecordType(
-    ).add_parent("project_type").add_property("name"))
-    ident.register_identifiable("Person", db.RecordType(
-    ).add_parent("Person").add_property("full_name"))
-
-    crawler = Crawler(debug=True, identifiableAdapter=ident)
-    crawler_definition = crawler.load_definition(
-        os.path.join(DATADIR, "dataset_cfoods.yml"))
-    # print(json.dumps(crawler_definition, indent=3))
-    # Load and register converter packages:
-    converter_registry = crawler.load_converters(crawler_definition)
-    # print("DictIntegerElement" in converter_registry)
-
-    records = crawler.start_crawling(
-        Directory("data", os.path.join(DATADIR, 'data')),
-        crawler_definition,
-        converter_registry
-    )
-    subd = crawler.debug_tree
-    subc = crawler.debug_metadata
-    # print(json.dumps(subc, indent=3))
-    # print(subd)
-    # print(subc)
-    # print(records)
-    ins, ups = crawler.synchronize()
-
-    dataspace = db.execute_query("FIND RECORD Dataspace WITH name=35 AND dataspace_id=20002 AND "
-                                 "archived=FALSE AND url='https://datacloud.de/index.php/f/7679'"
-                                 " AND Person", unique=True)
-    assert dataspace.get_property("start_date").value == "2022-03-01"
-    db.execute_query("FIND RECORD Person with full_name='Max Schmitt' AND"
-                     " given_name='Max'", unique=True)
-
-    dataset = db.execute_query(f"FIND RECORD Dataset with Dataspace={dataspace.id} AND title="
-                               "'Random numbers created on a random autumn day in a random office'"
-                               "", unique=True)
-    assert db.execute_query(f"COUNT RECORD with id={dataset.id} AND WHICH REFERENCES Person WITH full_name="
-                            "'Alexa Nozone' AND WHICH REFERENCES Person WITH full_name='Max Schmitt'"
-                            "") == 1
-    assert db.execute_query(f"COUNT RECORD with id={dataset.id} AND WHICH REFERENCES Event WITH "
-                            "start_datetime='2022-02-10T16:36:48+01:00'") == 1
diff --git a/integrationtests/test-profile/custom/other/restore/caosdb.2022-01-20T08:27:49.631552786+00:00.dump.sql b/integrationtests/test-profile/custom/other/restore/caosdb.2022-01-20T08:27:49.631552786+00:00.dump.sql
deleted file mode 100644
index c527a0040a469b5f48cbd50c786fe9ff24d545d5..0000000000000000000000000000000000000000
--- a/integrationtests/test-profile/custom/other/restore/caosdb.2022-01-20T08:27:49.631552786+00:00.dump.sql
+++ /dev/null
@@ -1,5716 +0,0 @@
--- MariaDB dump 10.19  Distrib 10.5.12-MariaDB, for debian-linux-gnu (x86_64)
---
--- Host: sqldb    Database: caosdb
--- ------------------------------------------------------
--- Server version	10.5.11-MariaDB-1:10.5.11+maria~focal
-
-/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
-/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
-/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
-/*!40101 SET NAMES utf8 */;
-/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
-/*!40103 SET TIME_ZONE='+00:00' */;
-/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
-/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
-/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
-/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
-
---
--- Table structure for table `archive_collection_type`
---
-
-DROP TABLE IF EXISTS `archive_collection_type`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_collection_type` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `collection` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  UNIQUE KEY `archive_collection_type-d-e-p-v` (`domain_id`,`entity_id`,`property_id`,`_iversion`),
-  KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`),
-  KEY `domain_id_2` (`domain_id`,`_iversion`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  CONSTRAINT `archive_collection_type_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_collection_type_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_collection_type_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_collection_type`
---
-
-LOCK TABLES `archive_collection_type` WRITE;
-/*!40000 ALTER TABLE `archive_collection_type` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_collection_type` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_data_type`
---
-
-DROP TABLE IF EXISTS `archive_data_type`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_data_type` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `datatype` int(10) unsigned NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  UNIQUE KEY `archive_data_type-d-e-p-v` (`domain_id`,`entity_id`,`property_id`,`_iversion`),
-  KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`),
-  KEY `domain_id_2` (`domain_id`,`_iversion`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  KEY `datatype` (`datatype`),
-  CONSTRAINT `archive_data_type_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_data_type_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_data_type_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_data_type_ibfk_4` FOREIGN KEY (`datatype`) REFERENCES `entities` (`id`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_data_type`
---
-
-LOCK TABLES `archive_data_type` WRITE;
-/*!40000 ALTER TABLE `archive_data_type` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_data_type` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_date_data`
---
-
-DROP TABLE IF EXISTS `archive_date_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_date_data` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `value` int(11) NOT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') COLLATE utf8_unicode_ci NOT NULL,
-  `pidx` int(10) unsigned NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`),
-  KEY `domain_id_2` (`domain_id`,`_iversion`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  CONSTRAINT `archive_date_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_date_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_date_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_date_data`
---
-
-LOCK TABLES `archive_date_data` WRITE;
-/*!40000 ALTER TABLE `archive_date_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_date_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_datetime_data`
---
-
-DROP TABLE IF EXISTS `archive_datetime_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_datetime_data` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `value` bigint(20) NOT NULL,
-  `value_ns` int(10) unsigned DEFAULT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') COLLATE utf8_unicode_ci NOT NULL,
-  `pidx` int(10) unsigned NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`),
-  KEY `domain_id_2` (`domain_id`,`_iversion`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  CONSTRAINT `archive_datetime_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_datetime_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_datetime_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_datetime_data`
---
-
-LOCK TABLES `archive_datetime_data` WRITE;
-/*!40000 ALTER TABLE `archive_datetime_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_datetime_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_desc_overrides`
---
-
-DROP TABLE IF EXISTS `archive_desc_overrides`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_desc_overrides` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `description` text COLLATE utf8_unicode_ci NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  UNIQUE KEY `archive_desc_overrides-d-e-p-v` (`domain_id`,`entity_id`,`property_id`,`_iversion`),
-  KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`),
-  KEY `domain_id_2` (`domain_id`,`_iversion`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  CONSTRAINT `archive_desc_overrides_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_desc_overrides_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_desc_overrides_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_desc_overrides`
---
-
-LOCK TABLES `archive_desc_overrides` WRITE;
-/*!40000 ALTER TABLE `archive_desc_overrides` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_desc_overrides` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_double_data`
---
-
-DROP TABLE IF EXISTS `archive_double_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_double_data` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `value` double NOT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') COLLATE utf8_unicode_ci NOT NULL,
-  `pidx` int(10) unsigned NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  `unit_sig` bigint(20) DEFAULT NULL,
-  KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`),
-  KEY `domain_id_2` (`domain_id`,`_iversion`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  CONSTRAINT `archive_double_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_double_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_double_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_double_data`
---
-
-LOCK TABLES `archive_double_data` WRITE;
-/*!40000 ALTER TABLE `archive_double_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_double_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_entities`
---
-
-DROP TABLE IF EXISTS `archive_entities`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_entities` (
-  `id` int(10) unsigned NOT NULL,
-  `description` text COLLATE utf8_unicode_ci DEFAULT NULL,
-  `role` enum('RECORDTYPE','RECORD','FILE','DOMAIN','PROPERTY','DATATYPE','ROLE','QUERYTEMPLATE') COLLATE utf8_unicode_ci NOT NULL,
-  `acl` int(10) unsigned DEFAULT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  PRIMARY KEY (`id`,`_iversion`),
-  KEY `acl` (`acl`),
-  CONSTRAINT `archive_entities_ibfk_1` FOREIGN KEY (`id`, `_iversion`) REFERENCES `entity_version` (`entity_id`, `_iversion`) ON DELETE CASCADE,
-  CONSTRAINT `archive_entities_ibfk_2` FOREIGN KEY (`acl`) REFERENCES `entity_acl` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_entities`
---
-
-LOCK TABLES `archive_entities` WRITE;
-/*!40000 ALTER TABLE `archive_entities` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_entities` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_enum_data`
---
-
-DROP TABLE IF EXISTS `archive_enum_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_enum_data` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `value` varbinary(255) NOT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') COLLATE utf8_unicode_ci NOT NULL,
-  `pidx` int(10) unsigned NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`),
-  KEY `domain_id_2` (`domain_id`,`_iversion`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  CONSTRAINT `archive_enum_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_enum_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_enum_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_enum_data`
---
-
-LOCK TABLES `archive_enum_data` WRITE;
-/*!40000 ALTER TABLE `archive_enum_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_enum_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_files`
---
-
-DROP TABLE IF EXISTS `archive_files`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_files` (
-  `file_id` int(10) unsigned NOT NULL,
-  `path` text COLLATE utf8_unicode_ci NOT NULL,
-  `size` bigint(20) unsigned NOT NULL,
-  `hash` binary(64) DEFAULT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  PRIMARY KEY (`file_id`,`_iversion`),
-  CONSTRAINT `archive_files_ibfk_1` FOREIGN KEY (`file_id`, `_iversion`) REFERENCES `entity_version` (`entity_id`, `_iversion`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_files`
---
-
-LOCK TABLES `archive_files` WRITE;
-/*!40000 ALTER TABLE `archive_files` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_files` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_integer_data`
---
-
-DROP TABLE IF EXISTS `archive_integer_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_integer_data` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `value` bigint(20) NOT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') COLLATE utf8_unicode_ci NOT NULL,
-  `pidx` int(10) unsigned NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  `unit_sig` bigint(20) DEFAULT NULL,
-  KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`),
-  KEY `domain_id_2` (`domain_id`,`_iversion`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  CONSTRAINT `archive_integer_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_integer_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_integer_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_integer_data`
---
-
-LOCK TABLES `archive_integer_data` WRITE;
-/*!40000 ALTER TABLE `archive_integer_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_integer_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_isa`
---
-
-DROP TABLE IF EXISTS `archive_isa`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_isa` (
-  `child` int(10) unsigned NOT NULL,
-  `child_iversion` int(10) unsigned NOT NULL,
-  `parent` int(10) unsigned NOT NULL,
-  `direct` tinyint(1) DEFAULT 1,
-  KEY `parent` (`parent`),
-  KEY `child` (`child`,`child_iversion`),
-  CONSTRAINT `archive_isa_ibfk_1` FOREIGN KEY (`parent`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_isa_ibfk_2` FOREIGN KEY (`child`, `child_iversion`) REFERENCES `entity_version` (`entity_id`, `_iversion`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_isa`
---
-
-LOCK TABLES `archive_isa` WRITE;
-/*!40000 ALTER TABLE `archive_isa` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_isa` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_name_data`
---
-
-DROP TABLE IF EXISTS `archive_name_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_name_data` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `value` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') COLLATE utf8_unicode_ci NOT NULL,
-  `pidx` int(10) unsigned NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`),
-  KEY `domain_id_2` (`domain_id`,`_iversion`),
-  KEY `value` (`value`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  CONSTRAINT `archive_name_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_name_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_name_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_name_data`
---
-
-LOCK TABLES `archive_name_data` WRITE;
-/*!40000 ALTER TABLE `archive_name_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_name_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_name_overrides`
---
-
-DROP TABLE IF EXISTS `archive_name_overrides`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_name_overrides` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `name` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  UNIQUE KEY `archive_name_overrides-d-e-p-v` (`domain_id`,`entity_id`,`property_id`,`_iversion`),
-  KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`),
-  KEY `domain_id_2` (`domain_id`,`_iversion`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  CONSTRAINT `archive_name_overrides_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_name_overrides_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_name_overrides_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_name_overrides`
---
-
-LOCK TABLES `archive_name_overrides` WRITE;
-/*!40000 ALTER TABLE `archive_name_overrides` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_name_overrides` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_null_data`
---
-
-DROP TABLE IF EXISTS `archive_null_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_null_data` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') COLLATE utf8_unicode_ci NOT NULL,
-  `pidx` int(10) unsigned NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`),
-  KEY `domain_id_2` (`domain_id`,`_iversion`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  CONSTRAINT `archive_null_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_null_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_null_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_null_data`
---
-
-LOCK TABLES `archive_null_data` WRITE;
-/*!40000 ALTER TABLE `archive_null_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_null_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_query_template_def`
---
-
-DROP TABLE IF EXISTS `archive_query_template_def`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_query_template_def` (
-  `id` int(10) unsigned NOT NULL,
-  `definition` mediumtext COLLATE utf8_unicode_ci NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  PRIMARY KEY (`id`,`_iversion`),
-  CONSTRAINT `archive_query_template_def_ibfk_1` FOREIGN KEY (`id`, `_iversion`) REFERENCES `entity_version` (`entity_id`, `_iversion`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_query_template_def`
---
-
-LOCK TABLES `archive_query_template_def` WRITE;
-/*!40000 ALTER TABLE `archive_query_template_def` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_query_template_def` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_reference_data`
---
-
-DROP TABLE IF EXISTS `archive_reference_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_reference_data` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `value` int(10) unsigned NOT NULL,
-  `value_iversion` int(10) unsigned DEFAULT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') COLLATE utf8_unicode_ci NOT NULL,
-  `pidx` int(10) unsigned NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`),
-  KEY `domain_id_2` (`domain_id`,`_iversion`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  KEY `value` (`value`),
-  CONSTRAINT `archive_reference_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_reference_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_reference_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_reference_data_ibfk_4` FOREIGN KEY (`value`) REFERENCES `entities` (`id`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_reference_data`
---
-
-LOCK TABLES `archive_reference_data` WRITE;
-/*!40000 ALTER TABLE `archive_reference_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_reference_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `archive_text_data`
---
-
-DROP TABLE IF EXISTS `archive_text_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `archive_text_data` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `value` text COLLATE utf8_unicode_ci NOT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') COLLATE utf8_unicode_ci NOT NULL,
-  `pidx` int(10) unsigned NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`),
-  KEY `domain_id_2` (`domain_id`,`_iversion`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  CONSTRAINT `archive_text_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_text_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `archive_text_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `archive_text_data`
---
-
-LOCK TABLES `archive_text_data` WRITE;
-/*!40000 ALTER TABLE `archive_text_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `archive_text_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `collection_type`
---
-
-DROP TABLE IF EXISTS `collection_type`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `collection_type` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `collection` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
-  UNIQUE KEY `collection_type-d-e-p` (`domain_id`,`entity_id`,`property_id`),
-  KEY `domain_id` (`domain_id`,`entity_id`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  CONSTRAINT `collection_type_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `collection_type_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `collection_type_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `collection_type`
---
-
-LOCK TABLES `collection_type` WRITE;
-/*!40000 ALTER TABLE `collection_type` DISABLE KEYS */;
-/*!40000 ALTER TABLE `collection_type` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `data_type`
---
-
-DROP TABLE IF EXISTS `data_type`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `data_type` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `datatype` int(10) unsigned NOT NULL,
-  UNIQUE KEY `datatype_ukey` (`domain_id`,`entity_id`,`property_id`),
-  KEY `name_ov_dom_ent_idx` (`domain_id`,`entity_id`),
-  KEY `datatype_forkey_ent` (`entity_id`),
-  KEY `datatype_forkey_pro` (`property_id`),
-  KEY `datatype_forkey_type` (`datatype`),
-  CONSTRAINT `datatype_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `datatype_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `datatype_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `datatype_forkey_type` FOREIGN KEY (`datatype`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `data_type`
---
-
-LOCK TABLES `data_type` WRITE;
-/*!40000 ALTER TABLE `data_type` DISABLE KEYS */;
-INSERT INTO `data_type` VALUES (0,0,20,14),(0,0,21,14),(0,0,24,14),(0,0,100,14);
-/*!40000 ALTER TABLE `data_type` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `date_data`
---
-
-DROP TABLE IF EXISTS `date_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `date_data` (
-  `domain_id` int(10) unsigned DEFAULT NULL,
-  `entity_id` int(10) unsigned DEFAULT NULL,
-  `property_id` int(10) unsigned DEFAULT NULL,
-  `value` int(11) NOT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') COLLATE utf8_unicode_ci DEFAULT NULL,
-  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
-  KEY `date_data_dom_ent_idx` (`domain_id`,`entity_id`),
-  KEY `date_ov_forkey_ent` (`entity_id`),
-  KEY `date_ov_forkey_pro` (`property_id`),
-  CONSTRAINT `date_ov_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `date_ov_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `date_ov_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `date_data`
---
-
-LOCK TABLES `date_data` WRITE;
-/*!40000 ALTER TABLE `date_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `date_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `datetime_data`
---
-
-DROP TABLE IF EXISTS `datetime_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `datetime_data` (
-  `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.',
-  `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.',
-  `property_id` int(10) unsigned NOT NULL COMMENT 'Property.',
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') COLLATE utf8_unicode_ci NOT NULL COMMENT 'Status of this statement.',
-  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
-  `value_ns` int(10) unsigned DEFAULT NULL,
-  `value` bigint(20) NOT NULL,
-  KEY `domain_id` (`domain_id`,`entity_id`),
-  KEY `dat_entity_id_entity` (`entity_id`),
-  KEY `dat_property_id_entity` (`property_id`),
-  CONSTRAINT `dat_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `dat_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `dat_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `datetime_data`
---
-
-LOCK TABLES `datetime_data` WRITE;
-/*!40000 ALTER TABLE `datetime_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `datetime_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `desc_overrides`
---
-
-DROP TABLE IF EXISTS `desc_overrides`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `desc_overrides` (
-  `domain_id` int(10) unsigned DEFAULT NULL,
-  `entity_id` int(10) unsigned DEFAULT NULL,
-  `property_id` int(10) unsigned DEFAULT NULL,
-  `description` text COLLATE utf8_unicode_ci DEFAULT NULL,
-  UNIQUE KEY `desc_ov_ukey` (`domain_id`,`entity_id`,`property_id`),
-  KEY `desc_ov_dom_ent_idx` (`domain_id`,`entity_id`),
-  KEY `desc_ov_forkey_ent` (`entity_id`),
-  KEY `desc_ov_forkey_pro` (`property_id`),
-  CONSTRAINT `desc_ov_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `desc_ov_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `desc_ov_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `desc_overrides`
---
-
-LOCK TABLES `desc_overrides` WRITE;
-/*!40000 ALTER TABLE `desc_overrides` DISABLE KEYS */;
-/*!40000 ALTER TABLE `desc_overrides` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `double_data`
---
-
-DROP TABLE IF EXISTS `double_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `double_data` (
-  `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.',
-  `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.',
-  `property_id` int(10) unsigned NOT NULL COMMENT 'Property.',
-  `value` double NOT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') COLLATE utf8_unicode_ci NOT NULL COMMENT 'Status of this statement.',
-  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
-  `unit_sig` bigint(20) DEFAULT NULL,
-  KEY `domain_id` (`domain_id`,`entity_id`),
-  KEY `dou_entity_id_entity` (`entity_id`),
-  KEY `dou_property_id_entity` (`property_id`),
-  CONSTRAINT `dou_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `dou_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `dou_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `double_data`
---
-
-LOCK TABLES `double_data` WRITE;
-/*!40000 ALTER TABLE `double_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `double_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `entities`
---
-
-DROP TABLE IF EXISTS `entities`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `entities` (
-  `id` int(10) unsigned NOT NULL AUTO_INCREMENT COMMENT 'Unique identifier.',
-  `description` text COLLATE utf8_unicode_ci DEFAULT NULL,
-  `role` enum('RECORDTYPE','RECORD','FILE','DOMAIN','PROPERTY','DATATYPE','ROLE','QUERYTEMPLATE') COLLATE utf8_unicode_ci NOT NULL,
-  `acl` int(10) unsigned DEFAULT NULL COMMENT 'Access Control List for the entity.',
-  PRIMARY KEY (`id`),
-  KEY `entity_entity_acl` (`acl`),
-  CONSTRAINT `entity_entity_acl` FOREIGN KEY (`acl`) REFERENCES `entity_acl` (`id`)
-) ENGINE=InnoDB AUTO_INCREMENT=103 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `entities`
---
-
-LOCK TABLES `entities` WRITE;
-/*!40000 ALTER TABLE `entities` DISABLE KEYS */;
-INSERT INTO `entities` VALUES (0,'The default domain.','ROLE',0),(1,'The default recordtype.','ROLE',0),(2,'The default record.','ROLE',0),(3,'The default file.','ROLE',0),(4,'The default property.','ROLE',0),(7,'The default datatype.','ROLE',0),(8,'The QueryTemplate role.','ROLE',0),(11,'The default reference data type.','DATATYPE',0),(12,'The default integer data type.','DATATYPE',0),(13,'The default double data type.','DATATYPE',0),(14,'The default text data type.','DATATYPE',0),(15,'The default datetime data type.','DATATYPE',0),(16,'The default timespan data type.','DATATYPE',0),(17,'The default file reference data type.','DATATYPE',0),(18,'The defaulf boolean data type','DATATYPE',0),(20,'Name of an entity','PROPERTY',0),(21,'Unit of an entity.','PROPERTY',0),(24,'Description of an entity.','PROPERTY',0),(50,'The SQLite file data type.','DATATYPE',0),(99,NULL,'RECORDTYPE',0),(100,'This is a unique key which should be only known to the pytest file that is used to run tests within this instance of CaosDB.','PROPERTY',2),(101,NULL,'RECORDTYPE',2),(102,NULL,'RECORD',2);
-/*!40000 ALTER TABLE `entities` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `entity_acl`
---
-
-DROP TABLE IF EXISTS `entity_acl`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `entity_acl` (
-  `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
-  `acl` varbinary(65525) NOT NULL,
-  PRIMARY KEY (`id`),
-  KEY `entity_acl_acl` (`acl`(3072))
-) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `entity_acl`
---
-
-LOCK TABLES `entity_acl` WRITE;
-/*!40000 ALTER TABLE `entity_acl` DISABLE KEYS */;
-INSERT INTO `entity_acl` VALUES (0,''),(2,'[{\"realm\":\"PAM\",\"bitSet\":536608371,\"username\":\"admin\"}]');
-/*!40000 ALTER TABLE `entity_acl` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `entity_version`
---
-
-DROP TABLE IF EXISTS `entity_version`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `entity_version` (
-  `entity_id` int(10) unsigned NOT NULL,
-  `hash` varbinary(255) DEFAULT NULL,
-  `version` varbinary(255) NOT NULL,
-  `_iversion` int(10) unsigned NOT NULL,
-  `_ipparent` int(10) unsigned DEFAULT NULL,
-  `srid` varbinary(255) NOT NULL,
-  PRIMARY KEY (`entity_id`,`_iversion`),
-  UNIQUE KEY `entity_version-e-v` (`entity_id`,`version`),
-  KEY `srid` (`srid`),
-  CONSTRAINT `entity_version_ibfk_1` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE,
-  CONSTRAINT `entity_version_ibfk_2` FOREIGN KEY (`srid`) REFERENCES `transactions` (`srid`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `entity_version`
---
-
-LOCK TABLES `entity_version` WRITE;
-/*!40000 ALTER TABLE `entity_version` DISABLE KEYS */;
-INSERT INTO `entity_version` VALUES (0,NULL,'507b9d49fb5379a29f7214cf0e01785266f60caa',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(1,NULL,'94331f259172f041c6c2cadc367381f8adc8e13e',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(2,NULL,'97cf2265ee2438ebae4cb0ca4d567ad73ea3c439',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(3,NULL,'9d58d341bd5dfb27def78ead97da879480d0ff32',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(4,NULL,'7ccfd2bc86f0ea0d178f7bbc45d50aca320e49ce',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(7,NULL,'dfa7e8bb2308d095713b7c7ed133797934bbd786',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(8,NULL,'ee9c70057e457be52f0aecd86aa989f53a696dd8',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(11,NULL,'d198fe9ac5d91945019d8a37cfb1f04b11e8900b',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(12,NULL,'c792b402eeec2182b787f401d2a25ac59ce20d0c',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(13,NULL,'d3fc056e7fe063c5f1e9c039a157b3cf28a64bac',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(14,NULL,'621026c1a4d23dd53e92c96f922dd450e4d126dc',1,NULL,'
cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(15,NULL,'419c2c18ac6522879aaaee83991b99ab71c2dcde',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(16,NULL,'a7561ff1d50e64b3a0faeb96ded9b5949fb8ccbc',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(17,NULL,'9540f72b8adb5d6c19ac1af4bc9f512ef46a0485',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(18,NULL,'e2156c6825353edf67dff833919706e8d19f8500',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(20,NULL,'3fddf8ff1cd074b1f0f393a4d2359c7da216e456',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(21,NULL,'8efa386bc8a3a673158476289fbab2ac7469cb21',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(24,NULL,'d0e0a82e89328da66105f98854256e273be32208',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(50,NULL,'2cebb0cb377f6de1216351e21cc32a0d2e866f19',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(99,NULL,'15b8ed5414e9bcd871acb2c62421c822a9154f66',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'),(100,NULL,'6fbec42f1bda31574472914ce0614b4ba447bd3a',1,NULL,'41dd8224-1e1a-4ad9-9d37-bad5841cb2a2'),(101,NULL,'bc922c549546af75ded1b2b6272c11825f78b5e
d',1,NULL,'41dd8224-1e1a-4ad9-9d37-bad5841cb2a2'),(102,NULL,'63e3fffa8ad0ea5f900641f562f905991e149f33',1,NULL,'c2deb139-70f3-4ba6-bbef-40ae2e33ec7d');
-/*!40000 ALTER TABLE `entity_version` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `enum_data`
---
-
-DROP TABLE IF EXISTS `enum_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `enum_data` (
-  `domain_id` int(10) unsigned DEFAULT NULL,
-  `entity_id` int(10) unsigned DEFAULT NULL,
-  `property_id` int(10) unsigned DEFAULT NULL,
-  `value` varbinary(255) NOT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') COLLATE utf8_unicode_ci DEFAULT NULL,
-  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
-  KEY `enum_ov_dom_ent_idx` (`domain_id`,`entity_id`),
-  KEY `enum_ov_forkey_ent` (`entity_id`),
-  KEY `enum_ov_forkey_pro` (`property_id`),
-  CONSTRAINT `enum_ov_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `enum_ov_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `enum_ov_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `enum_data`
---
-
-LOCK TABLES `enum_data` WRITE;
-/*!40000 ALTER TABLE `enum_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `enum_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `feature_config`
---
-
-DROP TABLE IF EXISTS `feature_config`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `feature_config` (
-  `_key` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
-  `_value` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
-  PRIMARY KEY (`_key`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `feature_config`
---
-
-LOCK TABLES `feature_config` WRITE;
-/*!40000 ALTER TABLE `feature_config` DISABLE KEYS */;
-INSERT INTO `feature_config` VALUES ('ENTITY_VERSIONING','ENABLED');
-/*!40000 ALTER TABLE `feature_config` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `files`
---
-
-DROP TABLE IF EXISTS `files`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `files` (
-  `file_id` int(10) unsigned NOT NULL COMMENT 'The file''s ID.',
-  `path` varchar(255) COLLATE utf8_unicode_ci NOT NULL COMMENT 'Directory of the file.',
-  `size` bigint(20) unsigned NOT NULL COMMENT 'Size in kB (oktet bytes).',
-  `hash` binary(64) DEFAULT NULL,
-  `checked_timestamp` bigint(20) NOT NULL DEFAULT 0,
-  PRIMARY KEY (`file_id`),
-  CONSTRAINT `fil_file_id_entity` FOREIGN KEY (`file_id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `files`
---
-
-LOCK TABLES `files` WRITE;
-/*!40000 ALTER TABLE `files` DISABLE KEYS */;
-/*!40000 ALTER TABLE `files` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `groups`
---
-
-DROP TABLE IF EXISTS `groups`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `groups` (
-  `user_id` int(10) unsigned NOT NULL,
-  `group_id` int(10) unsigned NOT NULL,
-  UNIQUE KEY `user_id` (`user_id`,`group_id`),
-  KEY `group_id_entities_id` (`group_id`),
-  CONSTRAINT `group_id_entities_id` FOREIGN KEY (`group_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `user_id_entities_id` FOREIGN KEY (`user_id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `groups`
---
-
-LOCK TABLES `groups` WRITE;
-/*!40000 ALTER TABLE `groups` DISABLE KEYS */;
-/*!40000 ALTER TABLE `groups` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `integer_data`
---
-
-DROP TABLE IF EXISTS `integer_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `integer_data` (
-  `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.',
-  `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.',
-  `property_id` int(10) unsigned NOT NULL COMMENT 'Property.',
-  `value` bigint(20) NOT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') COLLATE utf8_unicode_ci NOT NULL COMMENT 'Status of this statement.',
-  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
-  `unit_sig` bigint(20) DEFAULT NULL,
-  KEY `domain_id` (`domain_id`,`entity_id`),
-  KEY `int_entity_id_entity` (`entity_id`),
-  KEY `int_property_id_entity` (`property_id`),
-  CONSTRAINT `int_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `int_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `int_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `integer_data`
---
-
-LOCK TABLES `integer_data` WRITE;
-/*!40000 ALTER TABLE `integer_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `integer_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `isa`
---
-
-DROP TABLE IF EXISTS `isa`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `isa` (
-  `child` int(10) unsigned NOT NULL COMMENT 'Child',
-  `parent` int(10) unsigned NOT NULL COMMENT 'Parent',
-  `type` enum('INHERITANCE','SUBTYPING') COLLATE utf8_unicode_ci NOT NULL COMMENT 'Type of is-a relation.',
-  UNIQUE KEY `child` (`child`,`parent`),
-  KEY `parent_entity` (`parent`),
-  CONSTRAINT `child_entity` FOREIGN KEY (`child`) REFERENCES `entities` (`id`),
-  CONSTRAINT `parent_entity` FOREIGN KEY (`parent`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `isa`
---
-
-LOCK TABLES `isa` WRITE;
-/*!40000 ALTER TABLE `isa` DISABLE KEYS */;
-/*!40000 ALTER TABLE `isa` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `isa_cache`
---
-
-DROP TABLE IF EXISTS `isa_cache`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `isa_cache` (
-  `child` int(10) unsigned NOT NULL,
-  `parent` int(10) unsigned NOT NULL,
-  `rpath` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
-  PRIMARY KEY (`child`,`parent`,`rpath`),
-  KEY `isa_cache_parent_entity` (`parent`),
-  CONSTRAINT `isa_cache_child_entity` FOREIGN KEY (`child`) REFERENCES `entities` (`id`),
-  CONSTRAINT `isa_cache_parent_entity` FOREIGN KEY (`parent`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `isa_cache`
---
-
-LOCK TABLES `isa_cache` WRITE;
-/*!40000 ALTER TABLE `isa_cache` DISABLE KEYS */;
-INSERT INTO `isa_cache` VALUES (102,101,'102');
-/*!40000 ALTER TABLE `isa_cache` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `logging`
---
-
-DROP TABLE IF EXISTS `logging`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `logging` (
-  `level` int(11) NOT NULL,
-  `logger` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
-  `message` mediumtext COLLATE utf8_unicode_ci NOT NULL,
-  `millis` bigint(20) NOT NULL,
-  `logRecord` blob NOT NULL
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `logging`
---
-
-LOCK TABLES `logging` WRITE;
-/*!40000 ALTER TABLE `logging` DISABLE KEYS */;
-/*!40000 ALTER TABLE `logging` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `name_data`
---
-
-DROP TABLE IF EXISTS `name_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `name_data` (
-  `domain_id` int(10) unsigned NOT NULL,
-  `entity_id` int(10) unsigned NOT NULL,
-  `property_id` int(10) unsigned NOT NULL,
-  `value` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') COLLATE utf8_unicode_ci NOT NULL,
-  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
-  UNIQUE KEY `domain_id_2` (`domain_id`,`entity_id`,`property_id`),
-  KEY `domain_id` (`domain_id`,`entity_id`),
-  KEY `entity_id` (`entity_id`),
-  KEY `property_id` (`property_id`),
-  KEY `value` (`value`),
-  CONSTRAINT `name_data_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `name_data_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `name_data_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `name_data`
---
-
-LOCK TABLES `name_data` WRITE;
-/*!40000 ALTER TABLE `name_data` DISABLE KEYS */;
-INSERT INTO `name_data` VALUES (0,0,20,'DOMAIN','FIX',0),(0,1,20,'RECORDTYPE','FIX',0),(0,2,20,'RECORD','FIX',0),(0,3,20,'FILE','FIX',0),(0,4,20,'PROPERTY','FIX',0),(0,7,20,'DATATYPE','FIX',0),(0,8,20,'QUERYTEMPLATE','FIX',0),(0,11,20,'REFERENCE','FIX',0),(0,12,20,'INTEGER','FIX',0),(0,13,20,'DOUBLE','FIX',0),(0,14,20,'TEXT','FIX',0),(0,15,20,'DATETIME','FIX',0),(0,16,20,'TIMESPAN','FIX',0),(0,17,20,'FILE','FIX',0),(0,18,20,'BOOLEAN','FIX',0),(0,20,20,'name','FIX',0),(0,21,20,'unit','FIX',0),(0,24,20,'description','FIX',0),(0,50,20,'SQLITE','FIX',0),(0,100,20,'TestIdentification','FIX',0),(0,101,20,'PyTestInfo','FIX',0);
-/*!40000 ALTER TABLE `name_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `name_overrides`
---
-
-DROP TABLE IF EXISTS `name_overrides`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `name_overrides` (
-  `domain_id` int(10) unsigned DEFAULT NULL,
-  `entity_id` int(10) unsigned DEFAULT NULL,
-  `property_id` int(10) unsigned DEFAULT NULL,
-  `name` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
-  UNIQUE KEY `name_ov_ukey` (`domain_id`,`entity_id`,`property_id`),
-  KEY `name_ov_dom_ent_idx` (`domain_id`,`entity_id`),
-  KEY `name_ov_forkey_ent` (`entity_id`),
-  KEY `name_ov_forkey_pro` (`property_id`),
-  CONSTRAINT `name_ov_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `name_ov_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `name_ov_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `name_overrides`
---
-
-LOCK TABLES `name_overrides` WRITE;
-/*!40000 ALTER TABLE `name_overrides` DISABLE KEYS */;
-/*!40000 ALTER TABLE `name_overrides` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `null_data`
---
-
-DROP TABLE IF EXISTS `null_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `null_data` (
-  `domain_id` int(10) unsigned DEFAULT NULL,
-  `entity_id` int(10) unsigned DEFAULT NULL,
-  `property_id` int(10) unsigned DEFAULT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') COLLATE utf8_unicode_ci DEFAULT NULL,
-  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
-  KEY `null_data_dom_ent_idx` (`domain_id`,`entity_id`),
-  KEY `null_forkey_ent` (`entity_id`),
-  KEY `null_forkey_pro` (`property_id`),
-  CONSTRAINT `null_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `null_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `null_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `null_data`
---
-
-LOCK TABLES `null_data` WRITE;
-/*!40000 ALTER TABLE `null_data` DISABLE KEYS */;
-INSERT INTO `null_data` VALUES (0,101,100,'OBLIGATORY',0);
-/*!40000 ALTER TABLE `null_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `passwd`
---
-
-DROP TABLE IF EXISTS `passwd`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `passwd` (
-  `principal` varbinary(255) NOT NULL,
-  `hash` varbinary(255) NOT NULL,
-  `alg` varchar(255) COLLATE utf8_unicode_ci DEFAULT 'SHA-512',
-  `it` int(10) unsigned DEFAULT 5000,
-  `salt` varbinary(255) NOT NULL,
-  PRIMARY KEY (`principal`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `passwd`
---
-
-LOCK TABLES `passwd` WRITE;
-/*!40000 ALTER TABLE `passwd` DISABLE KEYS */;
-/*!40000 ALTER TABLE `passwd` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `passwords`
---
-
-DROP TABLE IF EXISTS `passwords`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `passwords` (
-  `entity_id` int(10) unsigned NOT NULL COMMENT 'User ID.',
-  `password` varchar(255) COLLATE utf8_unicode_ci NOT NULL COMMENT 'Password.',
-  PRIMARY KEY (`entity_id`),
-  CONSTRAINT `use_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `passwords`
---
-
-LOCK TABLES `passwords` WRITE;
-/*!40000 ALTER TABLE `passwords` DISABLE KEYS */;
-INSERT INTO `passwords` VALUES (98,'37d7bd8a833261b4e4653644ee0a065f522b92b3738ca9ae2cb43a83844bf352c4a59c386a44965997a508c61988c9484c093775027425091d6d3d435c3c0e0c'),(99,'37d7bd8a833261b4e4653644ee0a065f522b92b3738ca9ae2cb43a83844bf352c4a59c386a44965997a508c61988c9484c093775027425091d6d3d435c3c0e0c');
-/*!40000 ALTER TABLE `passwords` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `permissions`
---
-
-DROP TABLE IF EXISTS `permissions`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `permissions` (
-  `role` varbinary(255) NOT NULL,
-  `permissions` mediumtext COLLATE utf8_unicode_ci NOT NULL,
-  PRIMARY KEY (`role`),
-  CONSTRAINT `perm_name_roles` FOREIGN KEY (`role`) REFERENCES `roles` (`name`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `permissions`
---
-
-LOCK TABLES `permissions` WRITE;
-/*!40000 ALTER TABLE `permissions` DISABLE KEYS */;
-INSERT INTO `permissions` VALUES ('administration','[{\"grant\":\"true\",\"priority\":\"true\",\"permission\":\"*\"}]');
-/*!40000 ALTER TABLE `permissions` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `query_template_def`
---
-
-DROP TABLE IF EXISTS `query_template_def`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `query_template_def` (
-  `id` int(10) unsigned NOT NULL,
-  `definition` mediumtext COLLATE utf8_unicode_ci NOT NULL,
-  PRIMARY KEY (`id`),
-  CONSTRAINT `query_template_def_ibfk_1` FOREIGN KEY (`id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `query_template_def`
---
-
-LOCK TABLES `query_template_def` WRITE;
-/*!40000 ALTER TABLE `query_template_def` DISABLE KEYS */;
-/*!40000 ALTER TABLE `query_template_def` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `reference_data`
---
-
-DROP TABLE IF EXISTS `reference_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `reference_data` (
-  `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.',
-  `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.',
-  `property_id` int(10) unsigned NOT NULL COMMENT 'Property.',
-  `value` int(10) unsigned NOT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') COLLATE utf8_unicode_ci NOT NULL COMMENT 'Status of this statement.',
-  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
-  `value_iversion` int(10) unsigned DEFAULT NULL,
-  KEY `entity_id` (`entity_id`,`property_id`),
-  KEY `ref_domain_id_entity` (`domain_id`),
-  KEY `ref_property_id_entity` (`property_id`),
-  KEY `ref_value_entity` (`value`),
-  KEY `value` (`value`,`value_iversion`),
-  CONSTRAINT `ref_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `ref_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `ref_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `ref_value_entity` FOREIGN KEY (`value`) REFERENCES `entities` (`id`),
-  CONSTRAINT `reference_data_ibfk_1` FOREIGN KEY (`value`, `value_iversion`) REFERENCES `entity_version` (`entity_id`, `_iversion`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `reference_data`
---
-
-LOCK TABLES `reference_data` WRITE;
-/*!40000 ALTER TABLE `reference_data` DISABLE KEYS */;
-/*!40000 ALTER TABLE `reference_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `roles`
---
-
-DROP TABLE IF EXISTS `roles`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `roles` (
-  `name` varbinary(255) NOT NULL,
-  `description` mediumtext COLLATE utf8_unicode_ci DEFAULT NULL,
-  PRIMARY KEY (`name`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `roles`
---
-
-LOCK TABLES `roles` WRITE;
-/*!40000 ALTER TABLE `roles` DISABLE KEYS */;
-INSERT INTO `roles` VALUES ('administration','Users with this role have unrestricted permissions.'),('anonymous','Users who did not authenticate themselves.');
-/*!40000 ALTER TABLE `roles` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `stats`
---
-
-DROP TABLE IF EXISTS `stats`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `stats` (
-  `name` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
-  `value` blob DEFAULT NULL,
-  PRIMARY KEY (`name`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `stats`
---
-
-LOCK TABLES `stats` WRITE;
-/*!40000 ALTER TABLE `stats` DISABLE KEYS */;
-INSERT INTO `stats` VALUES ('RootBenchmark','��\0sr\0-org.caosdb.server.database.misc.RootBenchmark����Qk]\0\0xr\04org.caosdb.server.database.misc.TransactionBenchmark����Qk]\0J\0sinceL\0measurementst\0Ljava/util/Map;[\0stackTraceElementst\0[Ljava/lang/StackTraceElement;L\0\rsubBenchmarksq\0~\0xp\0\0~v��sr\0java.util.HashMap���`�\0F\0\nloadFactorI\0	thresholdxp?@\0\0\0\0\0\0w\0\0\0\0\0\0\0xur\0[Ljava.lang.StackTraceElement;F*<<�\"9\0\0xp\0\0\0sr\0java.lang.StackTraceElementa	Ś&6݅\0B\0formatI\0\nlineNumberL\0classLoaderNamet\0Ljava/lang/String;L\0declaringClassq\0~\0\nL\0fileNameq\0~\0\nL\0\nmethodNameq\0~\0\nL\0\nmoduleNameq\0~\0\nL\0\rmoduleVersionq\0~\0\nxp\0\0Bpt\0java.lang.Threadt\0Thread.javat\0\rgetStackTracet\0	java.baset\011.0.13sq\0~\0	\0\0 t\0appt\04org.caosdb.server.database.misc.TransactionBenchmarkt\0TransactionBenchmark.javat\0<init>ppsq\0~\0	\0\0\0�q\0~\0t\0-org.caosdb.server.database.misc.RootBenchmarkq\0~\0q\0~\0ppsq\0~\0	\0\0q\0~\0q\0~\0q\0~\0t\0<clinit>ppsq\0~\0	\0\0<q\0~\0t\0org.caosdb.server.CaosDBServert\0CaosDBServer.javat\0initBackendppsq\0~\0	\0\0\0�q\0~\0q\0~\0q\0~\0t\0mainppsq\0~\0?@\0\0\0\0\0w\0\0\0\0\0\0t\0Infosr\0,org.caosdb.server.database.misc.SubBenchmark����Qk]\0L\0nameq\0~\0\nxq\0~\0\0\0~v��$sq\0~\0?@\0\0\0\0\0\0w\0\0\0\0\0\0\0xuq\0~\0\0\0\0\nsq\0~\0	\0\0Bpq\0~\0q\0~\0\rq\0~\0q\0~\0q\0~\0sq\0~\0	\0\0 q\0~\0q\0~\0q\0~\0q\0~\0ppsq\0~\0	\0\0\0�q\0~\0t\0,org.caosdb.server.database.misc.SubBenchmarkq\0~\0q\0~\0ppsq\0~\0	\0\0�q\0~\0q\0~\0q\0~\0t\0getBenchmarkppsq\0~\0	\0\0�q\0~\0q\0~\0q\0~\0q\0~\0+ppsq\0~\0	\0\0\0#q\0~\0t\02org.caosdb.server.transaction.TransactionInterfacet\0TransactionInterface.javat\0getTransactionBenchmarkppsq\0~\0	\0\0\0/q\0~\0q\0~\0.q\0~\0/t\0executeppsq\0~\0	\0\0\0�q\0~\0t\0org.caosdb.server.utils.Infot\0	Info.javat\0syncDatabaseppsq\0~\0	\0\0\0�q\0~\0t\0/org.caosdb.server.database.misc.RootBenchmark$1q\0~\0t\0runppsq\0~\0	\0\0=pq\0~\0q\0~\0\rq\0~\09q\0~\0q\0~\0sq\0~\0?@\0\0\0\0\0w\0\0\0\0\0\0t\0	
SyncStatssq\0~\0\"\0\0~v��$sq\0~\0?@\0\0\0\0\0\0w\0\0\0\0\0\0\0xuq\0~\0\0\0\0	sq\0~\0	\0\0Bpq\0~\0q\0~\0\rq\0~\0q\0~\0q\0~\0sq\0~\0	\0\0 q\0~\0q\0~\0q\0~\0q\0~\0ppsq\0~\0	\0\0\0�q\0~\0q\0~\0)q\0~\0q\0~\0ppsq\0~\0	\0\0�q\0~\0q\0~\0q\0~\0q\0~\0+ppsq\0~\0	\0\0�q\0~\0q\0~\0q\0~\0q\0~\0+ppsq\0~\0	\0\0\0/q\0~\0q\0~\0.q\0~\0/q\0~\02ppsq\0~\0	\0\0\0�q\0~\0q\0~\04q\0~\05q\0~\06ppsq\0~\0	\0\0\0�q\0~\0q\0~\08q\0~\0q\0~\09ppsq\0~\0	\0\0=pq\0~\0q\0~\0\rq\0~\09q\0~\0q\0~\0sq\0~\0?@\0\0\0\0\0w\0\0\0\0\0\0t\0MySQLSyncStatssq\0~\0\"\0\0~v��%sq\0~\0?@\0\0\0\0\0\0w\0\0\0\0\0\0\0xuq\0~\0\0\0\0sq\0~\0	\0\0Bpq\0~\0q\0~\0\rq\0~\0q\0~\0q\0~\0sq\0~\0	\0\0 q\0~\0q\0~\0q\0~\0q\0~\0ppsq\0~\0	\0\0\0�q\0~\0q\0~\0)q\0~\0q\0~\0ppsq\0~\0	\0\0�q\0~\0q\0~\0q\0~\0q\0~\0+ppsq\0~\0	\0\0�q\0~\0q\0~\0q\0~\0q\0~\0+ppsq\0~\0	\0\0\0�q\0~\0t\0-org.caosdb.server.database.BackendTransactiont\0BackendTransaction.javat\0getImplementationppsq\0~\0	\0\0\0+q\0~\0t\08org.caosdb.server.database.backend.transaction.SyncStatst\0SyncStats.javaq\0~\02ppsq\0~\0	\0\0\0�q\0~\0q\0~\0Tq\0~\0Ut\0executeTransactionppsq\0~\0	\0\0\00q\0~\0q\0~\0.q\0~\0/q\0~\02ppsq\0~\0	\0\0\0�q\0~\0q\0~\04q\0~\05q\0~\06ppsq\0~\0	\0\0\0�q\0~\0q\0~\08q\0~\0q\0~\09ppsq\0~\0	\0\0=pq\0~\0q\0~\0\rq\0~\09q\0~\0q\0~\0sq\0~\0?@\0\0\0\0\0\0w\0\0\0\0\0\0\0xq\0~\0Jxq\0~\0<xq\0~\0!x'),('TransactionBenchmark','��\0sr\00caosdb.server.database.misc.TransactionBenchmark�Cl=���E\0J\0sinceL\0acct\0Ljava/util/HashMap;L\0countsq\0~\0xp\0\0l���Wsr\0java.util.HashMap���`�\0F\0\nloadFactorI\0	thresholdxp?@\0\0\0\0\0w\0\0\0\0\0\0t\0	SyncStatssr\0java.lang.Long;��̏#�\0J\0valuexr\0java.lang.Number������\0\0xp\0\0\0\0\0\0\0t\0GetInfosq\0~\0\0\0\0\0\0\0 xsq\0~\0?@\0\0\0\0\0w\0\0\0\0\0\0q\0~\0sr\0java.lang.Integer⠤���8\0I\0valuexq\0~\0\0\0\0q\0~\0	sq\0~\0\0\0\0x');
-/*!40000 ALTER TABLE `stats` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `text_data`
---
-
-DROP TABLE IF EXISTS `text_data`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `text_data` (
-  `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.',
-  `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.',
-  `property_id` int(10) unsigned NOT NULL COMMENT 'Property.',
-  `value` text COLLATE utf8_unicode_ci NOT NULL,
-  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') COLLATE utf8_unicode_ci NOT NULL COMMENT 'Status of this statement.',
-  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
-  KEY `domain_id` (`domain_id`,`entity_id`),
-  KEY `str_entity_id_entity` (`entity_id`),
-  KEY `str_property_id_entity` (`property_id`),
-  CONSTRAINT `str_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `str_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
-  CONSTRAINT `str_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `text_data`
---
-
-LOCK TABLES `text_data` WRITE;
-/*!40000 ALTER TABLE `text_data` DISABLE KEYS */;
-INSERT INTO `text_data` VALUES (0,102,100,'10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2','FIX',0);
-/*!40000 ALTER TABLE `text_data` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `transaction_log`
---
-
-DROP TABLE IF EXISTS `transaction_log`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `transaction_log` (
-  `transaction` varchar(255) COLLATE utf8_unicode_ci NOT NULL COMMENT 'Transaction.',
-  `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity ID.',
-  `username` varbinary(255) NOT NULL,
-  `seconds` bigint(20) unsigned NOT NULL DEFAULT 0,
-  `nanos` int(10) unsigned NOT NULL DEFAULT 0,
-  `realm` varbinary(255) NOT NULL,
-  KEY `entity_id` (`entity_id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `transaction_log`
---
-
-LOCK TABLES `transaction_log` WRITE;
-/*!40000 ALTER TABLE `transaction_log` DISABLE KEYS */;
-INSERT INTO `transaction_log` VALUES ('Insert',100,'admin',1642667277,464000000,'PAM'),('Insert',101,'admin',1642667277,464000000,'PAM'),('Insert',102,'admin',1642667277,633000000,'PAM');
-/*!40000 ALTER TABLE `transaction_log` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `transactions`
---
-
-DROP TABLE IF EXISTS `transactions`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `transactions` (
-  `srid` varbinary(255) NOT NULL,
-  `username` varbinary(255) NOT NULL,
-  `realm` varbinary(255) NOT NULL,
-  `seconds` bigint(20) unsigned NOT NULL,
-  `nanos` int(10) unsigned NOT NULL,
-  PRIMARY KEY (`srid`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `transactions`
---
-
-LOCK TABLES `transactions` WRITE;
-/*!40000 ALTER TABLE `transactions` DISABLE KEYS */;
-INSERT INTO `transactions` VALUES ('41dd8224-1e1a-4ad9-9d37-bad5841cb2a2','admin','PAM',1642667277,464000000),('c2deb139-70f3-4ba6-bbef-40ae2e33ec7d','admin','PAM',1642667277,633000000),('cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e','administration','CaosDB',0,0);
-/*!40000 ALTER TABLE `transactions` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `units_lin_con`
---
-
-DROP TABLE IF EXISTS `units_lin_con`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `units_lin_con` (
-  `signature_from` bigint(20) NOT NULL,
-  `signature_to` bigint(20) NOT NULL,
-  `a` decimal(65,30) NOT NULL,
-  `b_dividend` int(11) NOT NULL,
-  `b_divisor` int(11) NOT NULL,
-  `c` decimal(65,30) NOT NULL,
-  PRIMARY KEY (`signature_from`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `units_lin_con`
---
-
-LOCK TABLES `units_lin_con` WRITE;
-/*!40000 ALTER TABLE `units_lin_con` DISABLE KEYS */;
-/*!40000 ALTER TABLE `units_lin_con` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `user_info`
---
-
-DROP TABLE IF EXISTS `user_info`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `user_info` (
-  `realm` varbinary(255) NOT NULL,
-  `name` varbinary(255) NOT NULL,
-  `email` varbinary(255) DEFAULT NULL,
-  `status` enum('ACTIVE','INACTIVE') COLLATE utf8_unicode_ci NOT NULL DEFAULT 'INACTIVE',
-  `entity` int(10) unsigned DEFAULT NULL,
-  PRIMARY KEY (`realm`,`name`),
-  KEY `subject_entity` (`entity`),
-  CONSTRAINT `subjects_ibfk_1` FOREIGN KEY (`entity`) REFERENCES `entities` (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `user_info`
---
-
-LOCK TABLES `user_info` WRITE;
-/*!40000 ALTER TABLE `user_info` DISABLE KEYS */;
-/*!40000 ALTER TABLE `user_info` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `user_roles`
---
-
-DROP TABLE IF EXISTS `user_roles`;
-/*!40101 SET @saved_cs_client     = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `user_roles` (
-  `realm` varbinary(255) NOT NULL,
-  `user` varbinary(255) NOT NULL,
-  `role` varbinary(255) NOT NULL,
-  PRIMARY KEY (`realm`,`user`,`role`),
-  KEY `user_roles_ibfk_1` (`role`),
-  CONSTRAINT `user_roles_ibfk_1` FOREIGN KEY (`role`) REFERENCES `roles` (`name`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `user_roles`
---
-
-LOCK TABLES `user_roles` WRITE;
-/*!40000 ALTER TABLE `user_roles` DISABLE KEYS */;
-/*!40000 ALTER TABLE `user_roles` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Dumping routines for database 'caosdb'
---
-/*!50003 DROP FUNCTION IF EXISTS `CaosDBVersion` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `CaosDBVersion`() RETURNS varchar(255) CHARSET utf8 COLLATE utf8_unicode_ci
-    DETERMINISTIC
-RETURN 'v5.0.0' ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `constructDateTimeWhereClauseForColumn` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `constructDateTimeWhereClauseForColumn`(seconds_col VARCHAR(255), nanos_col VARCHAR(255), vDateTimeSecLow VARCHAR(255), vDateTimeNSLow VARCHAR(255), vDateTimeSecUpp VARCHAR(255), vDateTimeNSUpp VARCHAR(255), operator CHAR(4)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci
-    DETERMINISTIC
-BEGIN
-
-    DECLARE isInterval BOOLEAN DEFAULT vDateTimeSecUpp IS NOT NULL or vDateTimeNSUpp IS NOT NULL;
-    DECLARE operator_prefix CHAR(1) DEFAULT LEFT(operator,1);
-
-    IF isInterval THEN
-        IF operator = '=' THEN
-            RETURN " 0=1";
-        ELSEIF operator = '!=' THEN
-            RETURN " 0=1";
-        ELSEIF operator = '>' or operator = '<=' THEN
-            RETURN CONCAT(" ", seconds_col, operator_prefix, vDateTimeSecUpp);
-        ELSEIF operator = '<' or operator = '>=' THEN
-            RETURN CONCAT(" ", seconds_col, operator_prefix, vDateTimeSecLow);
-        ELSEIF operator = "(" THEN
-            RETURN CONCAT(" ", seconds_col, ">=", vDateTimeSecLow, " AND ",seconds_col, "<", vDateTimeSecUpp);
-        ELSEIF operator = "!(" THEN
-            RETURN CONCAT(" ", seconds_col, "<", vDateTimeSecLow, " OR ", seconds_col, ">=", vDateTimeSecUpp);
-        END IF;
-    ELSE
-        IF operator = '=' THEN
-            RETURN CONCAT(" ",
-                seconds_col,
-                "=", vDateTimeSecLow, IF(vDateTimeNSLow IS NULL, CONCAT(' AND ', nanos_col, ' IS NULL'), CONCAT(' AND ',
-                    nanos_col,
-                '=', vDateTimeNSLow)));
-        ELSEIF operator = '!=' THEN
-            RETURN CONCAT(" ",
-                seconds_col,
-                "!=", vDateTimeSecLow, IF(vDateTimeNSLow IS NULL, '', CONCAT(' OR ',
-                        nanos_col,
-                        '!=', vDateTimeNSLow)));
-        ELSEIF operator = '>' or operator = '<' THEN
-            RETURN CONCAT(" ",
-                seconds_col, operator, vDateTimeSecLow, IF(vDateTimeNSLow IS NULL, '', CONCAT(' OR (',seconds_col,'=', vDateTimeSecLow, ' AND ',nanos_col, operator, vDateTimeNSLow, ')')));
-        ELSEIF operator = '>=' or operator = '<=' THEN
-            RETURN CONCAT(
-                " ", seconds_col, operator, vDateTimeSecLow,
-                IF(vDateTimeNSLow IS NULL,
-                    '',
-                    CONCAT(
-                        ' AND (', seconds_col, operator_prefix, vDateTimeSecLow,
-                        ' OR ', nanos_col, operator, vDateTimeNSLow,
-                        ' OR ', nanos_col, ' IS NULL)')));
-        ELSEIF operator = "(" THEN
-            RETURN IF(vDateTimeNSLow IS NULL,CONCAT(" ",seconds_col,"=", vDateTimeSecLow),CONCAT(" ",seconds_col,"=",vDateTimeSecLow," AND ",nanos_col,"=",vDateTimeNSLow));
-        ELSEIF operator = "!(" THEN
-            RETURN IF(vDateTimeNSLow IS NULL,CONCAT(" ",seconds_col,"!=",vDateTimeSecLow, ""),CONCAT(" ",seconds_col,"!=",vDateTimeSecLow," OR ",nanos_col, " IS NULL OR ", nanos_col, "!=",vDateTimeNSLow));
-        END IF;
-    END IF;
-    return ' 0=1';
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `convert_unit` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `convert_unit`(unit_sig BIGINT, value DECIMAL(65,30)) RETURNS decimal(65,30)
-    DETERMINISTIC
-BEGIN
-    DECLARE ret DECIMAL(65,30) DEFAULT value;
-
-    SELECT (((value+a)*b_dividend)/b_divisor+c) INTO ret FROM units_lin_con WHERE signature_from=unit_sig;
-    RETURN ret;
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `getAggValueWhereClause` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `getAggValueWhereClause`(entities VARCHAR(255), properties VARCHAR(255)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci
-    DETERMINISTIC
-BEGIN
-        RETURN CONCAT(" EXISTS (SELECT 1 FROM `", entities, "` AS ent WHERE ent.id = subdata.entity_id LIMIT 1)", IF(properties IS NOT NULL AND properties != '', CONCAT(" AND EXISTS (SELECT 1 FROM `", properties, "` as props WHERE props.id = subdata.property_id LIMIT 1)"),''));
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `getDateTimeWhereClause` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `getDateTimeWhereClause`(vDateTime VARCHAR(255), operator CHAR(4)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci
-    DETERMINISTIC
-BEGIN
-    DECLARE sep_loc INTEGER DEFAULT LOCATE('--',vDateTime);
-    DECLARE vDateTimeLow VARCHAR(255) DEFAULT IF(sep_loc != 0, SUBSTRING_INDEX(vDateTime, '--',1), vDateTime);
-    DECLARE vDateTimeUpp VARCHAR(255) DEFAULT IF(sep_loc != 0, SUBSTRING_INDEX(vDateTime, '--',-1), NULL);
-
-    DECLARE vDateTimeSecLow VARCHAR(255) DEFAULT SUBSTRING_INDEX(vDateTimeLow, 'UTC', 1);
-    DECLARE vDateTimeNSLow VARCHAR(255) DEFAULT IF(SUBSTRING_INDEX(vDateTimeLow, 'UTC', -1)='',NULL,SUBSTRING_INDEX(vDateTimeLow, 'UTC', -1));
-
-    DECLARE vDateTimeSecUpp VARCHAR(255) DEFAULT IF(sep_loc != 0, SUBSTRING_INDEX(vDateTimeUpp, 'UTC', 1), NULL);
-    DECLARE vDateTimeNSUpp VARCHAR(255) DEFAULT IF(sep_loc != 0 AND SUBSTRING_INDEX(vDateTimeUpp, 'UTC', -1)!='',SUBSTRING_INDEX(vDateTimeUpp, 'UTC', -1),NULL);
-
-
-    RETURN constructDateTimeWhereClauseForColumn("subdata.value", "subdata.value_ns", vDateTimeSecLow, vDateTimeNSLow, vDateTimeSecUpp, vDateTimeNSUpp, operator);
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `getDateWhereClause` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `getDateWhereClause`(vDateTimeDotNotation VARCHAR(255), operator CHAR(4)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci
-    DETERMINISTIC
-BEGIN
-    DECLARE isInterval INTEGER DEFAULT LOCATE('--',vDateTimeDotNotation);
-    
-    DECLARE vILB VARCHAR(255) DEFAULT IF(isInterval != 0, SUBSTRING_INDEX(vDateTimeDotNotation, '--', 1), vDateTimeDotNotation);
-    
-    DECLARE vEUB VARCHAR(255) DEFAULT IF(isInterval != 0, SUBSTRING_INDEX(vDateTimeDotNotation, '--', -1), NULL);
-    DECLARE vILB_Date INTEGER DEFAULT SUBSTRING_INDEX(vILB, '.', 1);
-    DECLARE vEUB_Date INTEGER DEFAULT SUBSTRING_INDEX(vEUB, '.', 1);
-    
-    DECLARE hasTime INTEGER DEFAULT LOCATE('.NULL.NULL',vILB);
-    
-    DECLARE dom INTEGER DEFAULT vILB_Date % 100;
-    
-    DECLARE mon INTEGER DEFAULT ((vILB_Date % 10000) - dom) / 100;
-    
-    DECLARE yea INTEGER DEFAULT (vILB_Date - (vILB_Date % 10000)) / 10000;
-
-    IF operator = '=' and hasTime != 0 THEN
-        RETURN CONCAT(" subdata.value=", vILB_Date);
-    ELSEIF operator = "!=" and hasTime != 0 THEN
-        IF mon != 0  and dom != 0 THEN
-            RETURN CONCAT(" subdata.value!=", vILB_Date, " and subdata.value%100!=0");
-        ELSEIF mon != 0 THEN
-            RETURN CONCAT(" subdata.value!=", vILB_Date, " and subdata.value%100=0 and subdata.value%10000!=0");
-        ELSE
-            RETURN CONCAT(" subdata.value!=", vILB_Date, " and subdata.value%10000=0");
-        END IF;
-    ELSEIF operator = "(" and hasTime != 0 THEN
-        IF mon != 0 and dom != 0 THEN
-            RETURN CONCAT(" subdata.value=", vILB_Date);
-        ELSEIF mon != 0 THEN
-            RETURN CONCAT(" subdata.value=",vILB_Date," OR (subdata.value>", vILB_Date, " and subdata.value<", vEUB_Date, " and subdata.value%10000!=0)");
-        ELSE
-            RETURN CONCAT(" subdata.value=",vILB_Date," OR (subdata.value>", vILB_Date, " and subdata.value<", vEUB_Date,")");
-        END IF;
-    ELSEIF operator = "!(" THEN
-        IF hasTime = 0 THEN
-            RETURN " 0=0";
-        END IF;
-        IF mon != 0 and dom != 0 THEN
-            RETURN CONCAT(" subdata.value!=",vILB_Date);
-        ELSEIF mon != 0 THEN
-            RETURN CONCAT(" (subdata.value!=",vILB_Date, " AND subdata.value%100=0) OR ((subdata.value<", vILB_Date, " or subdata.value>", vEUB_Date, ") and subdata.value%100!=0)");
-        ELSE
-            RETURN CONCAT(" (subdata.value!=",vILB_Date, " AND subdata.value%10000=0) OR ((subdata.value<", vILB_Date, " or subdata.value>=", vEUB_Date, ") and subdata.value%10000!=0)");
-        END IF;
-    ELSEIF operator = "<" THEN
-        IF mon != 0 and dom != 0 THEN
-            RETURN CONCAT(" subdata.value<", vILB_Date, " and (subdata.value%100!=0 or (subdata.value<", yea*10000+mon*100, " and subdata.value%10000!=0) or (subdata.value<", yea*10000, " and subdata.value%10000=0))");
-        ELSEIF mon != 0 THEN
-            RETURN CONCAT(" subdata.value<", vILB_Date, " and (subdata.value%10000!=0 or (subdata.value<", yea*10000, "))");
-        ELSE
-            RETURN CONCAT(" subdata.value<", vILB_Date);
-        END IF;
-    ELSEIF operator = ">" THEN
-        IF mon != 0 and dom != 0 THEN
-            RETURN CONCAT(" subdata.value>", vILB_Date);
-        ELSEIF mon != 0 THEN
-            RETURN CONCAT(" subdata.value>=",vEUB_Date);
-        ELSE
-            RETURN CONCAT(" subdata.value>=",vEUB_Date);
-        END IF;
-    ELSEIF operator = "<=" THEN
-        IF mon != 0 and dom != 0 THEN
-            
-            RETURN CONCAT(" subdata.value<=", vILB_Date,
-                          " or (subdata.value<=", yea*10000 + mon*100, " and subdata.value%100=0)");
-        ELSEIF mon != 0 THEN
-            
-            RETURN CONCAT(" subdata.value<", vEUB_Date);
-        ELSE
-            
-            RETURN CONCAT(" subdata.value<", vEUB_Date);
-        END IF;
-    ELSEIF operator = ">=" THEN
-        IF mon != 0 and dom != 0 THEN
-            
-            RETURN CONCAT(" subdata.value>=", vILB_Date,
-                          " or (subdata.value>=", yea*10000 + mon*100, " and subdata.value%100=0)",
-                          " or (subdata.value>=", yea*10000, " and subdata.value%10000=0)");
-        ELSEIF mon != 0 THEN
-            
-            RETURN CONCAT(" subdata.value>=", yea*10000 + mon*100,
-                          " or (subdata.value>=", yea*10000, " and subdata.value%10000=0)");
-        ELSE
-            
-            RETURN CONCAT(" subdata.value>=", yea*10000);
-        END IF;
-    END IF;
-
-    return ' 0=1';
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `getDoubleWhereClause` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `getDoubleWhereClause`(value DOUBLE, unit_sig BIGINT, valueStdUnit DECIMAL(65,30), stdUnit_sig BIGINT, o CHAR(4)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci
-    DETERMINISTIC
-BEGIN
-    RETURN IF(unit_sig IS NULL AND value IS NOT NULL, 
-        CONCAT('subdata.value ', o, ' \'', value, '\''), 
-        CONCAT(
-            IF(value IS NULL, '', 
-                CONCAT('(subdata.unit_sig=', unit_sig, ' AND subdata.value ', o, ' \'', value, '\') OR ')), 
-        	IF(unit_sig = stdUnit_sig,'',CONCAT('(subdata.unit_sig=', stdUnit_sig,' AND subdata.value ', o, ' \'', valueStdUnit, '\') OR ')),'(standard_unit(subdata.unit_sig)=', stdUnit_sig,' AND convert_unit(subdata.unit_sig,subdata.value) ', o, ' ', valueStdUnit, ')')); 
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `get_head_relative` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `get_head_relative`(EntityID INT UNSIGNED,
-    Offset INT UNSIGNED) RETURNS varbinary(255)
-    READS SQL DATA
-BEGIN
-    
-    
-    
-    
-    RETURN (
-        SELECT e.version
-            FROM entity_version AS e
-            WHERE e.entity_id = EntityID
-            ORDER BY e._iversion DESC
-            LIMIT 1 OFFSET Offset
-        );
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `get_head_version` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `get_head_version`(EntityID INT UNSIGNED) RETURNS varbinary(255)
-    READS SQL DATA
-BEGIN
-    RETURN get_head_relative(EntityID, 0);
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `get_iversion` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `get_iversion`(EntityID INT UNSIGNED,
-    Version VARBINARY(255)) RETURNS int(10) unsigned
-    READS SQL DATA
-BEGIN
-    RETURN (
-        SELECT e._iversion
-            FROM entity_version AS e
-            WHERE e.entity_id = EntityID
-                AND e.version = Version
-        );
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `get_primary_parent_version` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `get_primary_parent_version`(EntityID INT UNSIGNED,
-    Version VARBINARY(255)) RETURNS varbinary(255)
-    READS SQL DATA
-BEGIN
-    RETURN (
-        SELECT p.version
-            FROM entity_version AS e INNER JOIN entity_version AS p
-                ON (e._ipparent = p._iversion
-                    AND e.entity_id = p.entity_id)
-            WHERE e.entity_id = EntityID
-                AND e.version = Version
-        );
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `get_version_timestamp` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `get_version_timestamp`(EntityID INT UNSIGNED,
-    Version VARBINARY(255)) RETURNS varchar(255) CHARSET utf8 COLLATE utf8_unicode_ci
-    READS SQL DATA
-BEGIN
-    RETURN (
-        SELECT concat(t.seconds, '.', t.nanos)
-            FROM entity_version AS e INNER JOIN transactions AS t
-                ON ( e.srid = t.srid )
-            WHERE e.entity_id = EntityID
-            AND e.version = Version
-    );
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `is_feature_config` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `is_feature_config`(_Key VARCHAR(255),
-    Expected VARCHAR(255)) RETURNS tinyint(1)
-    READS SQL DATA
-BEGIN
-    RETURN (
-        SELECT f._value = Expected FROM feature_config as f WHERE f._key = _Key
-    );
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `makeStmt` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `makeStmt`(sourceSet VARCHAR(255), targetSet VARCHAR(255), data VARCHAR(20000),
-                                properties VARCHAR(20000), versioned BOOLEAN) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci
-    NO SQL
-BEGIN
-        IF sourceSet = "entities" AND versioned THEN
-            RETURN CONCAT('INSERT IGNORE INTO `',
-                targetSet,
-                '` (id, _iversion) SELECT entity_id, _iversion FROM ',
-                data,
-                IF(properties IS NULL, '',
-                    CONCAT(' AS data JOIN `', properties, '` AS prop ON (data.property_id = prop.id) WHERE ',
-                           'data.entity_id = prop.id2 OR prop.id2 = 0')));
-        END IF;
-        RETURN CONCAT(
-            IF(targetSet IS NULL,
-                CONCAT('DELETE FROM `',sourceSet,'` WHERE NOT EXISTS (SELECT 1 FROM '), 
-                CONCAT('INSERT IGNORE INTO `',targetSet,'` (id) SELECT id FROM `',sourceSet,'` ',
-                       'WHERE EXISTS (SELECT 1 FROM ')),
-            IF(properties IS NULL,
-                CONCAT(data,' as data WHERE '),
-                CONCAT('`',properties,'` as prop JOIN ',data,' as data ON (data.property_id=prop.id) WHERE ',
-                       '(data.entity_id=prop.id2 OR prop.id2=0) AND ')),
-            'data.entity_id=`', sourceSet, '`.`id` LIMIT 1)'
-        );
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `ms` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `ms`(ps bigint(20) unsigned  ) RETURNS double
-return  TRUNCATE(ps/1000000000,3) ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `standard_unit` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `standard_unit`(unit_sig BIGINT) RETURNS bigint(20)
-    DETERMINISTIC
-BEGIN
-    DECLARE ret BIGINT DEFAULT unit_sig;
-
-    SELECT signature_to INTO ret FROM units_lin_con WHERE signature_from=unit_sig;
-    RETURN ret;
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `_get_head_iversion` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `_get_head_iversion`(EntityID INT UNSIGNED) RETURNS int(10) unsigned
-    READS SQL DATA
-BEGIN
-    
-    
-    
-    
-    RETURN (
-        SELECT e._iversion
-            FROM entity_version AS e
-            WHERE e.entity_id = EntityID
-            ORDER BY e._iversion DESC
-            LIMIT 1
-        );
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP FUNCTION IF EXISTS `_get_version` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` FUNCTION `_get_version`(EntityID INT UNSIGNED,
-    IVersion INT UNSIGNED) RETURNS varbinary(255)
-    READS SQL DATA
-BEGIN
-    RETURN (
-        SELECT version FROM entity_version
-            WHERE entity_id = EntityID
-            AND _iversion = IVersion
-        );
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `applyBackReference` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `applyBackReference`(in sourceSet VARCHAR(255), targetSet VARCHAR(255),
-    in propertiesTable VARCHAR(255), in entitiesTable VARCHAR(255), in subQuery BOOLEAN,
-    in versioned BOOLEAN)
-BEGIN
-    DECLARE newTableName VARCHAR(255) DEFAULT NULL;
-
-
-    IF subQuery IS TRUE THEN
-        call registerTempTableName(newTableName);
-
-        SET @createBackRefSubQueryTableStr = CONCAT('CREATE TEMPORARY TABLE `',newTableName,'` ( entity_id INT UNSIGNED NOT NULL, id INT UNSIGNED NOT NULL, CONSTRAINT `',newTableName,'PK` PRIMARY KEY (id, entity_id))');
-
-        PREPARE createBackRefSubQueryTable FROM @createBackRefSubQueryTableStr;
-        EXECUTE createBackRefSubQueryTable;
-        DEALLOCATE PREPARE createBackRefSubQueryTable;
-
-        SET @backRefSubResultSetStmtStr = CONCAT('INSERT IGNORE INTO `',
-            newTableName,
-            '` (id,entity_id) SELECT entity_id AS id, value AS entity_id FROM `reference_data` AS data ',
-            'WHERE EXISTS (SELECT 1 FROM `',
-                sourceSet,
-                '` AS source WHERE source.id=data.value LIMIT 1)',
-            IF(propertiesTable IS NULL,
-                '',
-                CONCAT(' AND EXISTS (SELECT 1 FROM `',
-                    propertiesTable,
-                    '` AS p WHERE p.id=data.property_id LIMIT 1)')),
-            IF(entitiesTable IS NULL,
-                '',
-                CONCAT(' AND EXISTS (SELECT 1 FROM `',
-                    entitiesTable,
-                    '` AS e WHERE e.id=data.entity_id LIMIT 1)'))
-        );
-
-        PREPARE backRefSubResultSetStmt FROM @backRefSubResultSetStmtStr;
-        EXECUTE backRefSubResultSetStmt;
-        DEALLOCATE PREPARE backRefSubResultSetStmt;
-
-        SELECT newTableName as list;
-    ELSE
-        IF versioned THEN
-            IF sourceSet = "entities" THEN
-                
-                SET @stmtBackRefStr = CONCAT('INSERT IGNORE INTO `',
-                    targetSet,
-                    '` (id, _iversion) SELECT source.id, _get_head_iversion(source.id)',
-                    
-                    ' FROM entities AS source WHERE EXISTS (',
-                        'SELECT 1 FROM `reference_data` AS data WHERE data.value=source.id AND (',
-                            'data.value_iversion IS NULL OR data.value_iversion=_get_head_iversion(source.id))',
-                    IF(entitiesTable IS NULL,
-                        '',
-                        CONCAT(' AND EXISTS (SELECT 1 FROM `',
-                            entitiesTable,
-                            '` AS e WHERE e.id=data.entity_id LIMIT 1)')),
-                    IF(propertiesTable IS NULL,
-                        '',
-                        CONCAT(' AND EXISTS (SELECT 1 FROM `',
-                            propertiesTable,
-                            '` AS p WHERE p.id=data.property_id LIMIT 1)')),
-                    ') UNION ALL ',
-                    
-                    'SELECT source.id, source._iversion FROM archive_entities AS source WHERE EXISTS (',
-                        'SELECT 1 FROM `reference_data` AS data WHERE data.value=source.id AND ',
-                          '(data.value_iversion IS NULL OR data.value_iversion=source._iversion)',
-                    IF(entitiesTable IS NULL,
-                        '',
-                        CONCAT(' AND EXISTS (SELECT 1 FROM `',
-                            entitiesTable,
-                            '` AS e WHERE e.id=data.entity_id LIMIT 1)')),
-                    IF(propertiesTable IS NULL,
-                        '',
-                        CONCAT(' AND EXISTS (SELECT 1 FROM `',
-                            propertiesTable,
-                            '` AS p WHERE p.id=data.property_id LIMIT 1)')),
-
-                    ')');
-            ELSEIF targetSet IS NULL OR sourceSet = targetSet THEN
-                SET @stmtBackRefStr = CONCAT('DELETE FROM `',
-                    sourceSet,
-                    '` WHERE NOT EXISTS (SELECT 1 FROM `reference_data` AS data WHERE data.value=`',
-                    sourceSet,
-                    '`.`id` AND ( data.value_iversion IS NULL OR data.value_iversion=`',
-                    sourceSet,
-                    '`._iversion)',
-                    IF(entitiesTable IS NULL,
-                        '',
-                        CONCAT(' AND EXISTS (SELECT 1 FROM `',
-                            entitiesTable,
-                            '` AS e WHERE e.id=data.entity_id LIMIT 1)')),
-                    IF(propertiesTable IS NULL,
-                        '',
-                        CONCAT(' AND EXISTS (SELECT 1 FROM `',
-                            propertiesTable,
-                            '` AS p WHERE p.id=data.property_id LIMIT 1)')),
-                    ')');
-            ELSE
-                SET @stmtBackRefStr = CONCAT('INSERT IGNORE INTO `',
-                    targetSet,
-                    '` (id, _iversion) SELECT source.id, source._iversion FROM `',
-                    sourceSet,
-                    '` AS source WHERE EXISTS (',
-                    'SELECT 1 FROM `reference_data` AS data WHERE data.value=source.id AND',
-                    ' (data.value_iversion IS NULL OR data.value_iversion=source._iversion)',
-                    IF(entitiesTable IS NULL,
-                        '',
-                        CONCAT(' AND EXISTS (SELECT 1 FROM `',
-                            entitiesTable,
-                            '` AS e WHERE e.id=data.entity_id LIMIT 1)')),
-                    IF(propertiesTable IS NULL,
-                        '',
-                        CONCAT(' AND EXISTS (SELECT 1 FROM `',
-                            propertiesTable,
-                            '` AS p WHERE p.id=data.property_id LIMIT 1)')),
-
-                    ')');
-            END IF;
-        ELSE
-            
-            IF targetSet IS NULL OR sourceSet = targetSet THEN
-                
-                SET @stmtBackRefStr = CONCAT('DELETE FROM `',
-                    sourceSet,
-                    '` WHERE NOT EXISTS (SELECT 1 FROM `reference_data` AS data WHERE data.value=`',
-                    sourceSet,
-                    '`.`id`',
-                    IF(entitiesTable IS NULL,
-                        '',
-                        CONCAT('
-                            AND EXISTS (SELECT 1 FROM `',
-                            entitiesTable,
-                            '` AS e WHERE e.id=data.entity_id LIMIT 1)')),
-                    IF(propertiesTable IS NULL,
-                        '',
-                        CONCAT('
-                            AND EXISTS (SELECT 1 FROM `',
-                            propertiesTable,
-                            '` AS p WHERE p.id=data.property_id LIMIT 1)')),
-                    ')');
-            ELSE
-                
-                SET @stmtBackRefStr = CONCAT('INSERT IGNORE INTO `',
-                    targetSet,
-                    '` (id) SELECT id FROM `',
-                    sourceSet,
-                    '` AS source WHERE EXISTS (SELECT 1 FROM `reference_data` AS data WHERE data.value=source.id',
-                    IF(entitiesTable IS NULL,
-                        '',
-                        CONCAT(' AND EXISTS (SELECT 1 FROM `',
-                            entitiesTable,
-                            '` AS e WHERE e.id=data.entity_id LIMIT 1)')),
-                    IF(propertiesTable IS NULL,
-                        '',
-                        CONCAT(' AND EXISTS (SELECT 1 FROM `',
-                            propertiesTable,
-                            '` AS p WHERE p.id=data.property_id LIMIT 1)')),
-                    ')');
-            END IF;
-        END IF;
-
-        PREPARE stmtBackRef FROM @stmtBackRefStr;
-        EXECUTE stmtBackRef;
-        DEALLOCATE PREPARE stmtBackRef;
-    END IF;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `applyIDFilter` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `applyIDFilter`(in sourceSet VARCHAR(255), in targetSet VARCHAR(255),
-    in o CHAR(2), in vInt BIGINT, in agg CHAR(3), in versioned BOOLEAN)
-IDFILTER_LABEL: BEGIN
-DECLARE data VARCHAR(20000) DEFAULT NULL;
-DECLARE aggVal VARCHAR(255) DEFAULT NULL;
-
-
-IF agg IS NOT NULL THEN
-    IF versioned THEN
-        
-        SELECT 1 FROM id_agg_with_versioning_not_implemented;
-    END IF;
-    SET @stmtIDAggValStr = CONCAT(
-        "SELECT ",
-        agg,
-        "(id) INTO @sAggVal FROM `",
-        sourceSet,
-        "`");
-    PREPARE stmtIDAggVal FROM @stmtIDAggValStr;
-    EXECUTE stmtIDAggVal;
-    DEALLOCATE PREPARE stmtIDAggVal;
-    SET aggVal = @sAggVal;
-END IF;
-
-
-IF targetSet IS NULL OR targetSet = sourceSet THEN
-    SET data = CONCAT(
-        "DELETE FROM `",
-        sourceSet,
-        "` WHERE ",
-        IF(o IS NULL OR vInt IS NULL,
-            "1=1",
-            CONCAT("NOT id",
-                o,
-                vInt)),
-        IF(aggVal IS NULL,
-            "",
-            CONCAT(" AND id!=",
-                aggVal)));
-ELSEIF versioned AND sourceSet = "entities" THEN
-
-    
-    SET data = CONCAT(
-        "INSERT IGNORE INTO `",
-        targetSet,
-        '` (id, _iversion) SELECT id, _get_head_iversion(id) FROM `entities` WHERE ',
-        IF(o IS NULL OR vInt IS NULL,
-            "1=1",
-            CONCAT("id",
-                o,
-                vInt)),
-        IF(aggVal IS NULL,
-            "",
-            CONCAT(" AND id=",
-                aggVal)),
-        ' UNION SELECT id, _iversion FROM `archive_entities` WHERE ',
-        IF(o IS NULL OR vInt IS NULL,
-            "1=1",
-            CONCAT("id",
-                o,
-                vInt)),
-        IF(aggVal IS NULL,
-            "",
-            CONCAT(" AND id=",
-                aggVal)));
-    
-
-ELSE
-    SET data = CONCAT(
-        "INSERT IGNORE INTO `",
-        targetSet,
-        IF(versioned,
-            '` (id, _iversion) SELECT data.id, data._iversion FROM `',
-            '` (id) SELECT data.id FROM `'),
-        sourceSet,
-        "` AS data WHERE ",
-        IF(o IS NULL OR vInt IS NULL,
-            "1=1",
-            CONCAT("data.id",
-                o,
-                vInt)),
-        IF(aggVal IS NULL,
-            "",
-            CONCAT(" AND data.id=",
-                aggVal)));
-END IF;
-
-Set @stmtIDFilterStr = data;
-PREPARE stmtIDFilter FROM @stmtIDFilterStr;
-EXECUTE stmtIDFilter;
-DEALLOCATE PREPARE stmtIDFilter;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `applyPOV` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `applyPOV`(in sourceSet VARCHAR(255), 
-                                 in targetSet VARCHAR(255), 
-                                 in propertiesTable VARCHAR(255),
-                                 in refIdsTable VARCHAR(255),
-                                 in o CHAR(4), 
-                                 in vText VARCHAR(255),  
-                                 in vInt INT,  
-                                 in vDouble DOUBLE,  
-                                 in unit_sig BIGINT,
-                                 in vDoubleStdUnit DOUBLE,
-                                 in stdUnit_sig BIGINT,
-                                 in vDateTime VARCHAR(255),
-                                 in vDateTimeDotNotation VARCHAR(255),
-                                 in agg CHAR(3), 
-                                 in pname VARCHAR(255), 
-                                 in versioned BOOLEAN)
-POV_LABEL: BEGIN
-    DECLARE data TEXT DEFAULT NULL; 
-    DECLARE sTextData VARCHAR(20000) DEFAULT NULL; 
-    DECLARE sNameData VARCHAR(20000) DEFAULT NULL; 
-    DECLARE sEnumData VARCHAR(20000) DEFAULT NULL; 
-    DECLARE sIntData VARCHAR(20000) DEFAULT NULL; 
-    DECLARE sDoubleData VARCHAR(20000) DEFAULT NULL; 
-    DECLARE sDatetimeData VARCHAR(20000) DEFAULT NULL; 
-    DECLARE sNullData VARCHAR(20000) DEFAULT NULL; 
-    DECLARE sDateData VARCHAR(20000) DEFAULT NULL; 
-    DECLARE sRefData VARCHAR(20000) DEFAULT NULL; 
-    DECLARE aggValue VARCHAR(255) DEFAULT NULL;
-    DECLARE aggValueWhereClause VARCHAR(20000) DEFAULT NULL;
-    DECLARE distinctUnits INT DEFAULT 0;
-    DECLARE usedStdUnit BIGINT DEFAULT NULL;
-    DECLARE keepTabl VARCHAR(255) DEFAULT NULL;
-    DECLARE existence_op VARCHAR(255) DEFAULT "EXISTS";
-
-    IF o = '->' THEN
-        
-        call applyRefPOV(sourceSet,targetSet, propertiesTable, refIdsTable, versioned);
-        LEAVE POV_LABEL;
-    ELSEIF o = '0' THEN
-        
-        
-        SET vText = NULL;
-        SET sTextData = 'SELECT domain_id, entity_id, property_id FROM `null_data` AS subdata';
-
-        
-
-    ELSEIF o = '!0' THEN
-        
-        
-        SET vText = NULL;
-        
-        SET sTextData = CONCAT(
-        'SELECT DISTINCT domain_id, entity_id, property_id FROM `text_data` AS subdata ',
-            'WHERE subdata.value IS NOT NULL UNION ALL ',
-        'SELECT DISTINCT domain_id, entity_id, property_id FROM `name_data` AS subdata ',
-            'WHERE subdata.value IS NOT NULL UNION ALL ',
-        'SELECT DISTINCT domain_id, entity_id, property_id FROM `enum_data` AS subdata ',
-            'WHERE subdata.value IS NOT NULL UNION ALL ',
-        'SELECT DISTINCT domain_id, entity_id, property_id FROM `integer_data` AS subdata ',
-            'WHERE subdata.value IS NOT NULL UNION ALL ',
-        'SELECT DISTINCT domain_id, entity_id, property_id FROM `double_data` AS subdata ',
-            'WHERE subdata.value IS NOT NULL UNION ALL ',
-        'SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data` AS subdata ',
-            'WHERE subdata.value IS NOT NULL UNION ALL ',
-        'SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data` AS subdata ',
-            'WHERE subdata.value IS NOT NULL UNION ALL ',
-        'SELECT DISTINCT domain_id, entity_id, property_id FROM `reference_data` AS subdata ',
-            'WHERE subdata.value IS NOT NULL');
-
-    ELSEIF o = "(" or o = "!(" THEN  
-        IF versioned THEN
-            SET sTextData = IF(vText IS NULL,
-            CONCAT(
-                ' SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) ',
-                'AS _iversion, property_id FROM `date_data` UNION ALL ',
-                'SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_date_data`'),
-            IF(vDateTimeDotNotation IS NULL, NULL,  
-                CONCAT(' SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) ',
-                       'AS _iversion, property_id FROM `date_data` AS subdata WHERE ',
-                       getDateWhereClause(vDateTimeDotNotation, o), ' UNION ALL ',
-                       'SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_date_data` ',
-                       'AS subdata WHERE ', getDateWhereClause(vDateTimeDotNotation, o))));
-            SET sDatetimeData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `datetime_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_datetime_data`',
-                                                  IF(vDateTime IS NULL, NULL,
-                                                                        CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `datetime_data` AS subdata WHERE ', getDateTimeWhereClause(vDateTime, o), ' UNION ALL SELECT DISTINCT domain_id, entity_id,_iversion, property_id FROM `archive_datetime_data` AS subdata WHERE ', getDateTimeWhereClause(vDateTime, o))));
-        ELSE  
-            SET sTextData = IF(vText IS NULL,
-                ' SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data`',
-                IF(vDateTimeDotNotation IS NULL, NULL,
-                    CONCAT(' SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data` AS subdata WHERE ',
-                             getDateWhereClause(vDateTimeDotNotation, o))));
-            SET sDatetimeData = IF(vText IS NULL,
-                ' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data`',
-                IF(vDateTime IS NULL, NULL,
-                   CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data` ',
-                           'AS subdata WHERE ', getDateTimeWhereClause(vDateTime, o))));
-        END IF;
-        SET vText = NULL;
-    ELSEIF agg IS NOT NULL THEN
-        IF versioned THEN
-            SELECT 1 FROM versioned_agg_pov_filter_not_implemented;
-        END IF;
-        
-
-        
-        SET aggValueWhereClause = CONCAT(getDoubleWhereClause(vDouble, unit_sig, vDoubleStdUnit, stdUnit_sig, o), ' AND ');
-        SET aggValueWhereClause = CONCAT(IF(aggValueWhereClause IS NULL, '', aggValueWhereClause), getAggValueWhereClause(sourceSet, propertiesTable));
-
-        
-        SET @aggValueStmtStr = CONCAT('SELECT ',agg,'(subdata.value), ', agg, '(convert_unit(subdata.unit_sig,subdata.value)), COUNT(DISTINCT standard_unit(subdata.unit_sig)), max(standard_unit(subdata.unit_sig)) INTO @sAggValue, @sAggValueConvert, @distinctUnits, @StdUnitSig FROM (SELECT entity_id, property_id, value, unit_sig FROM `integer_data` UNION ALL SELECT entity_id, property_id, value, unit_sig FROM `double_data`) AS subdata WHERE ', aggValueWhereClause);
-
-        
-        PREPARE stmtAggValueStmt FROM @aggValueStmtStr;
-        EXECUTE stmtAggValueStmt;
-        DEALLOCATE PREPARE stmtAggValueStmt;
-
-        SET distinctUnits = @distinctUnits;
-        SET aggValue = @sAggValue;
-
-        
-        IF distinctUnits = 1 THEN
-            SET aggValue = @sAggValueConvert;
-            SET usedStdUnit = @StdUnitSig;
-        ELSE
-            call raiseWarning(CONCAT("The filter POV(",IF(pname IS NULL, 'NULL', pname),",",IF(o IS NULL, 'NULL', o),",",IF(vText IS NULL, 'NULL', vText),") with the aggregate function '", agg, "' could not match the values against each other with their units. The values had different base units. Only their numric value had been taken into account." ));
-        END IF;
-
-        IF aggValue IS NULL THEN
-            SET sTextData = 'SELECT NULL as domain_id, NULL as entity_id, NULL as property_id';
-        ELSE
-            SET sTextData = '';
-            SET sIntData = CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `integer_data` as subdata WHERE ', getDoubleWhereClause(aggValue, usedStdUnit, aggValue, usedStdUnit, '='));
-            SET sDoubleData = CONCAT(' SELECT DISTINCT domain_id, entity_id, property_id FROM `double_data` as subdata WHERE ', getDoubleWhereClause(aggValue, usedStdUnit, aggValue, usedStdUnit, '='));
-        END IF;
-
-        SET vText = NULL;
-    ELSE
-        
-        IF versioned THEN
-            SET sTextData = IF(vText IS NULL,
-            'SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `text_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_text_data` ',
-            CONCAT(
-            'SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id ',
-              'FROM `text_data` AS subdata WHERE subdata.value ', o,' ? ',
-            'UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id ',
-              'FROM `archive_text_data` AS subdata WHERE subdata.value ', o, '?'
-            ));
-            SET sNameData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `name_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_name_data` ', CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `name_data` AS subdata WHERE subdata.value ', o, ' ? UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_name_data` AS subdata WHERE subdata.value ', o, '?'));
-            SET sEnumData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `enum_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_enum_data` ', CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `enum_data` AS subdata WHERE subdata.value ', o, ' ? UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_enum_data` AS subdata WHERE subdata.value ', o, '?'));
-            IF o = "!=" AND refIdsTable IS NOT NULL THEN
-                SET existence_op = "NOT EXISTS";
-            END IF;
-            SET sRefData = IF(vText IS NULL,
-                ' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `reference_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_reference_data`',
-                IF(refIdsTable IS NULL,
-                    NULL,
-                    CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `reference_data` AS subdata WHERE ', existence_op, ' (SELECT 1 FROM `', refIdsTable, '` AS refIdsTable WHERE subdata.value=refIdsTable.id LIMIT 1) AND subdata.status != "REPLACEMENT" UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_reference_data` AS subdata WHERE ', existence_op, ' (SELECT 1 FROM `', refIdsTable, '` AS refIdsTable WHERE subdata.value=refIdsTable.id LIMIT 1) AND subdata.status != "REPLACEMENT"')));
-            SET sDoubleData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT subdata.domain_id, subdata.entity_id, _get_head_iversion(subdata.entity_id) AS _iversion, subdata.property_id FROM `double_data` AS subdata UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_double_data` ', IF(vDouble IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id), property_id FROM `double_data` AS subdata WHERE ', getDoubleWhereClause(vDouble,unit_sig,vDoubleStdUnit,stdUnit_sig,o), ' UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_double_data` AS subdata WHERE ', getDoubleWhereClause(vDouble, unit_sig, vDoubleStdUnit, stdUnit_sig, o))));
-            SET sIntData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT subdata.domain_id, subdata.entity_id, _get_head_iversion(subdata.entity_id) AS _iversion, subdata.property_id FROM `integer_data` AS subdata UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_integer_data`', IF(vInt IS NULL AND vDoubleStdUnit IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `integer_data` AS subdata WHERE ', getDoubleWhereClause(vInt, unit_sig, vDoubleStdUnit, stdUnit_sig, o), ' UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_integer_data` AS subdata WHERE ', getDoubleWhereClause(vInt, unit_sig, vDoubleStdUnit, stdUnit_sig, o))));
-            SET sDatetimeData = IF(vText IS NULL,' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `datetime_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_datetime_data`', IF(vDateTime IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `datetime_data` AS subdata WHERE ',getDateTimeWhereClause(vDateTime,o), ' UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_datetime_data` AS subdata WHERE ',getDateTimeWhereClause(vDateTime,o))));
-            SET sDateData = IF(vText IS NULL,' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `date_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_date_data`', IF(vDateTimeDotNotation IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `date_data` AS subdata WHERE ', getDateWhereClause(vDateTimeDotNotation,o), ' UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_date_data` AS subdata WHERE ', getDateWhereClause(vDateTimeDotNotation,o))));
-            SET sNullData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `null_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_null_data`', NULL);
-        
-        ELSE
-            SET sTextData = IF(vText IS NULL, 'SELECT DISTINCT domain_id, entity_id, property_id FROM `text_data`', CONCAT('SELECT DISTINCT domain_id, entity_id, property_id FROM `text_data` AS subdata WHERE subdata.value ',o,' ?'));
-            SET sNameData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `name_data`', CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `name_data` AS subdata WHERE subdata.value ', o, ' ?'));
-            SET sEnumData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `enum_data`', CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `enum_data` AS subdata WHERE subdata.value ', o, ' ?'));
-            IF o = "!=" AND refIdsTable IS NOT NULL THEN
-                SET existence_op = "NOT EXISTS";
-            END IF;
-            SET sRefData = IF(vText IS NULL,
-                ' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `reference_data`',
-                IF(refIdsTable IS NULL,
-                    NULL,
-                    CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `reference_data` AS subdata WHERE ',existence_op ,' (SELECT 1 FROM `', refIdsTable, '` AS refIdsTable WHERE subdata.value=refIdsTable.id LIMIT 1) AND subdata.status != "REPLACEMENT"')));
-            SET sDoubleData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT subdata.domain_id, subdata.entity_id, subdata.property_id FROM `double_data` AS subdata', IF(vDouble IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `double_data` AS subdata WHERE ', getDoubleWhereClause(vDouble,unit_sig,vDoubleStdUnit,stdUnit_sig,o))));
-            SET sIntData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT subdata.domain_id, subdata.entity_id, subdata.property_id FROM `integer_data` AS subdata', IF(vInt IS NULL AND vDoubleStdUnit IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `integer_data` AS subdata WHERE ', getDoubleWhereClause(vInt, unit_sig, vDoubleStdUnit, stdUnit_sig, o))));
-            SET sDatetimeData = IF(vText IS NULL,' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data`', IF(vDateTime IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data` AS subdata WHERE ',getDateTimeWhereClause(vDateTime,o))));
-            SET sDateData = IF(vText IS NULL,' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data`', IF(vDateTimeDotNotation IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data` AS subdata WHERE ',getDateWhereClause(vDateTimeDotNotation,o))));
-            SET sNullData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `null_data`', NULL);
-        END IF;
-
-    END IF;
-
-    SET data = CONCAT('(',sTextData,
-                IF(sNameData IS NULL, '', sNameData),
-                IF(sEnumData IS NULL, '', sEnumData),
-                IF(sDoubleData IS NULL, '', sDoubleData),
-                IF(sIntData IS NULL, '', sIntData),
-                IF(sDatetimeData IS NULL, '', sDatetimeData),
-                IF(sDateData IS NULL, '', sDateData),
-                IF(sRefData IS NULL, '', sRefData),
-                IF(sNullData IS NULL, '', sNullData),
-                ')'
-            );
-
-
-    call createTmpTable(keepTabl, versioned);
-    IF versioned THEN
-        
-        SET @stmtPOVkeepTblStr = CONCAT(
-            'INSERT IGNORE INTO `', keepTabl, '` (id, _iversion) SELECT entity_id AS id, _iversion FROM ', data,
-            ' as data', IF(propertiesTable IS NULL, '', CONCAT(
-                ' WHERE EXISTS (Select 1 from `', propertiesTable, '` AS prop ',
-                  'WHERE prop.id = data.property_id AND (prop.id2=data.entity_id OR prop.id2=0))')));
-
-        IF targetSet IS NOT NULL THEN
-            SET @stmtPOVStr = CONCAT('INSERT IGNORE INTO `',
-                    targetSet,
-                    '` (id, _iversion) SELECT source.id, source._iversion FROM `',
-                    keepTabl,
-                    '` AS source');
-        ELSE
-        
-            SET @stmtPOVStr = CONCAT('DELETE FROM `',
-                    sourceSet,
-                    '` WHERE NOT EXISTS (SELECT 1 FROM `',
-                    keepTabl,
-                    '` AS data WHERE data.id=`',
-                    sourceSet,
-                    '`.`id` AND data._iversion=`',
-                    sourceSet,
-                    '`._iversion LIMIT 1)');
-
-        END IF;
-
-        
-        PREPARE stmt3 FROM @stmtPOVStr;
-        PREPARE stmtPOVkeepTbl FROM @stmtPOVkeepTblStr;
-        IF vText IS NULL THEN
-            EXECUTE stmtPOVkeepTbl;
-        ELSE
-            SET @vText = vText;
-            EXECUTE stmtPOVkeepTbl USING @vText, @vText, @vText, @vText, @vText, @vText;
-        END IF;
-        EXECUTE stmt3;
-        DEALLOCATE PREPARE stmt3;
-        DEALLOCATE PREPARE stmtPOVkeepTbl;
-    ELSE
-        
-        SET @stmtPOVkeepTblStr = CONCAT('INSERT IGNORE INTO `', keepTabl, '` (id) SELECT DISTINCT entity_id AS id FROM ', data, ' as data', IF(propertiesTable IS NULL, '', CONCAT(' WHERE EXISTS (Select 1 from `', propertiesTable, '` AS prop WHERE prop.id = data.property_id AND (prop.id2=data.entity_id OR prop.id2=0))')));
-
-        SET @stmtPOVStr = CONCAT(
-                IF(targetSet IS NULL,
-                    CONCAT('DELETE FROM `',
-                        sourceSet,
-                        '` WHERE NOT EXISTS (SELECT 1 FROM `'),
-                    CONCAT('INSERT IGNORE INTO `',
-                        targetSet,
-                        '` (id) SELECT id FROM `',
-                        sourceSet,
-                        '` WHERE EXISTS (SELECT 1 FROM `')),
-                keepTabl,
-                '` AS data WHERE data.id=`',
-                sourceSet,
-                '`.`id` LIMIT 1)'
-            );
-
-        
-        PREPARE stmt3 FROM @stmtPOVStr;
-        PREPARE stmtPOVkeepTbl FROM @stmtPOVkeepTblStr;
-        IF vText IS NULL THEN
-            EXECUTE stmtPOVkeepTbl;
-        ELSE
-            SET @vText = vText;
-            EXECUTE stmtPOVkeepTbl USING @vText, @vText, @vText;
-        END IF;
-        EXECUTE stmt3;
-        DEALLOCATE PREPARE stmt3;
-        DEALLOCATE PREPARE stmtPOVkeepTbl;
-    END IF;
-
-    SELECT @stmtPOVkeepTblStr as applyPOVStmt1, @stmtPOVStr as applyPOVStmt2, keepTabl as applyPOVIntermediateResultSet;
-
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `applyRefPOV` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `applyRefPOV`(in sourceSet VARCHAR(255), in targetSet VARCHAR(255),
-                                    in properties VARCHAR(255), in refs VARCHAR(255),
-                                    in versioned BOOLEAN)
-BEGIN
-    DECLARE data VARCHAR(20000) DEFAULT CONCAT(
-        '(SELECT domain_id, entity_id, property_id FROM `reference_data` AS subdata ',
-        'WHERE EXISTS (SELECT 1 FROM `', refs, '` AS refs WHERE subdata.value=refs.id LIMIT 1))');
-
-    IF versioned THEN
-        SET data = CONCAT(
-            '(SELECT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id ',
-                'FROM `reference_data` AS subdata WHERE EXISTS (',
-                    'SELECT 1 FROM `', refs, '` AS refs WHERE subdata.value=refs.id LIMIT 1) ',
-            'UNION ALL SELECT domain_id, entity_id, _iversion, property_id ',
-                'FROM `archive_reference_data` AS subdata WHERE EXISTS (',
-                    'SELECT 1 FROM `', refs, '` AS refs WHERE subdata.value=refs.id LIMIT 1))');
-    END IF;
-    SET @stmtRefPOVStr = makeStmt(sourceSet,targetSet,data,properties, versioned);
-
-    PREPARE stmt4 FROM @stmtRefPOVStr;
-    EXECUTE stmt4;
-
-    SELECT @stmtRefPOVstr as applyRefPOVStmt;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `applySAT` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `applySAT`(in sourceSet VARCHAR(255), in targetSet VARCHAR(255), in loc MEDIUMTEXT, in op CHAR(5))
-BEGIN
-
-    IF targetSet IS NULL OR sourceSet = targetSet THEN
-        SET @stmtSATString = CONCAT('DELETE FROM `', sourceSet, '` WHERE id NOT IN (SELECT file_id FROM files WHERE path ', op, ' ?)');  
-    ELSE
-        SET @stmtSATString = CONCAT('INSERT INTO `', targetSet, '` (id) SELECT data.id FROM `',sourceSet,'` as data WHERE EXISTS (SELECT 1 FROM `files` as f WHERE f.file_id=data.id AND f.path ', op, ' ?)');
-    END IF;
-    PREPARE stmtSAT FROM @stmtSATString;
-	SET @loc = loc;
-    EXECUTE stmtSAT USING @loc;
-    DEALLOCATE PREPARE stmtSAT;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `applyTransactionFilter` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `applyTransactionFilter`(in sourceSet VARCHAR(255), targetSet VARCHAR(255), in transaction VARCHAR(255), in operator_u CHAR(2), in realm VARCHAR(255), in userName VARCHAR(255), in ilb BIGINT, in ilb_nanos INT UNSIGNED, in eub BIGINT, in eub_nanos INT UNSIGNED, in operator_t CHAR(2))
-BEGIN
-	DECLARE data TEXT default CONCAT('(SELECT entity_id FROM transaction_log AS t WHERE t.transaction=\'', 
-		transaction, 
-		'\'',
-		IF(userName IS NOT NULL, 
-			CONCAT(' AND t.realm', operator_u, '? AND t.username', operator_u, '?'),
-			'' 
-		),
-		IF(ilb IS NOT NULL, 
-			CONCAT(" AND", constructDateTimeWhereClauseForColumn("t.seconds", "t.nanos", ilb, ilb_nanos, eub, eub_nanos, operator_t)),
-			""
-		),
-		')'
-	);
-
-	SET @stmtTransactionStr = makeStmt(sourceSet, targetSet, data, NULL, FALSE);
-	PREPARE stmtTransactionFilter from @stmtTransactionStr;
-	IF userName IS NOT NULL THEN
-		SET @userName = userName;
-		SET @realm = realm;
-		EXECUTE stmtTransactionFilter USING @realm, @userName;
-	ELSE
-		EXECUTE stmtTransactionFilter;
-	END IF;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `calcComplementUnion` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `calcComplementUnion`(in targetSet VARCHAR(255), in subResultSet VARCHAR(255), in universe VARCHAR(255), in versioned BOOLEAN)
-BEGIN
-    IF versioned AND universe = "entities" THEN
-        SET @stmtComplementUnionStr = CONCAT(
-            'INSERT IGNORE INTO `', targetSet,
-            '` SELECT e.id, _get_head_iversion(e.id) FROM entities as e WHERE NOT EXISTS ( SELECT 1 FROM `',
-            subResultSet,
-            '` AS diff WHERE diff.id=e.id AND diff._iversion = _get_head_iversion(e.id)) UNION ALL SELECT e.id, e._iversion FROM archive_entities AS e WHERE NOT EXISTS ( SELECT 1 FROM `',
-            subResultSet,
-            '` as diff WHERE e.id = diff.id AND e._iversion = diff._iversion)');
-    ELSEIF versioned THEN
-        SET @stmtComplementUnionStr = CONCAT(
-            'INSERT IGNORE INTO `', targetSet,
-            '` SELECT id FROM `',universe,
-            '` AS universe WHERE NOT EXISTS ( SELECT 1 FROM `',
-                subResultSet,'`
-                AS diff WHERE diff.id=universe.id AND diff._iversion = universe.id_version)');
-    ELSE
-        SET @stmtComplementUnionStr = CONCAT('INSERT IGNORE INTO `', targetSet, '` SELECT id FROM `',universe, '` AS universe WHERE NOT EXISTS ( SELECT 1 FROM `', subResultSet,'` AS diff WHERE diff.id=universe.id)');
-    END IF;
-    PREPARE stmtComplementUnion FROM @stmtComplementUnionStr;
-    EXECUTE stmtComplementUnion;
-    DEALLOCATE PREPARE stmtComplementUnion;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `calcDifference` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `calcDifference`(in resultSetTable VARCHAR(255), in diff VARCHAR(255), in versioned BOOLEAN)
-BEGIN
-    IF versioned THEN
-        SET @diffStmtStr = CONCAT('DELETE FROM `', resultSetTable, '` WHERE EXISTS ( SELECT 1 FROM `', diff,'` AS diff WHERE diff.id=`',resultSetTable,'`.`id` AND diff._iversion=`', resultSetTable, '`.`_iversion`)');
-    ELSE
-        SET @diffStmtStr = CONCAT('DELETE FROM `', resultSetTable, '` WHERE EXISTS ( SELECT 1 FROM `', diff,'` AS diff WHERE diff.id=`',resultSetTable,'`.`id`)');
-    END IF;
-    PREPARE diffStmt FROM @diffStmtStr;
-    EXECUTE diffStmt;
-    DEALLOCATE PREPARE diffStmt;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `calcIntersection` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `calcIntersection`(in resultSetTable VARCHAR(255), in intersectWith VARCHAR(255), in versioned BOOLEAN)
-BEGIN
-    IF versioned THEN
-        SET @diffStmtStr = CONCAT('DELETE FROM `',
-            resultSetTable,
-            '` WHERE NOT EXISTS ( SELECT 1 FROM `',
-            intersectWith,
-            '` AS diff WHERE diff.id=`',
-            resultSetTable,
-            '`.`id` AND diff._iversion=`',
-            resultSetTable,
-            '`.`_iversion`)');
-    ELSE
-        SET @diffStmtStr = CONCAT('DELETE FROM `', resultSetTable, '` WHERE NOT EXISTS ( SELECT 1 FROM `', intersectWith,'` AS diff WHERE diff.id=`',resultSetTable,'`.`id`)');
-    END IF;
-    PREPARE diffStmt FROM @diffStmtStr;
-    EXECUTE diffStmt;
-
-    
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `calcUnion` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `calcUnion`(in targetSet VARCHAR(255), in sourceSet VARCHAR(255))
-BEGIN
-    SET @diffStmtStr = CONCAT('INSERT IGNORE INTO `', targetSet, '` SELECT * FROM `',sourceSet,'`');
-    PREPARE diffStmt FROM @diffStmtStr;
-    EXECUTE diffStmt;
-    DEALLOCATE PREPARE diffStmt;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `cleanUpLinCon` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `cleanUpLinCon`()
-BEGIN
-
-    DELETE FROM units_lin_con WHERE NOT EXISTS (SELECT '1' FROM double_data WHERE unit_sig=signature_from) AND NOT EXISTS (SELECT '1' FROM integer_data WHERE unit_sig=signature_from);
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `cleanUpQuery` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `cleanUpQuery`()
-BEGIN
-    CREATE TEMPORARY TABLE IF NOT EXISTS warnings (warning TEXT NOT NULL);
-    SELECT * from warnings;
-
-    SET @pstmtstr = CONCAT('DROP TEMPORARY TABLE IF EXISTS `warnings`',
-        IF(@tempTableList IS NULL, '', CONCAT(',',@tempTableList)));
-    PREPARE pstmt FROM @pstmtstr;
-    EXECUTE pstmt;
-
-    SET @tempTableList = NULL;
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `copyTable` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `copyTable`(in fromTable VARCHAR(255), in toTable VARCHAR(255))
-BEGIN
-    SET @copyTableStmtStr = CONCAT('INSERT IGNORE INTO `', toTable, '` (id) SELECT id FROM `', fromTable, '`');
-    PREPARE copyTableStmt FROM @copyTableStmtStr;
-    EXECUTE copyTableStmt;
-    DEALLOCATE PREPARE copyTableStmt;
-    
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `createTmpTable` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `createTmpTable`(out newTableName VARCHAR(255), in versioned BOOLEAN)
-BEGIN
-    call registerTempTableName(newTableName);
-
-    IF versioned THEN
-        SET @createTableStmtStr = CONCAT('CREATE TEMPORARY TABLE `', newTableName,
-            '` ( id INT UNSIGNED, _iversion INT UNSIGNED, PRIMARY KEY (id, _iversion))' );
-    ELSE
-        SET @createTableStmtStr = CONCAT('CREATE TEMPORARY TABLE `', newTableName,'` ( id INT UNSIGNED PRIMARY KEY)' );
-    END IF;
-
-    PREPARE createTableStmt FROM @createTableStmtStr; 
-    EXECUTE createTableStmt;
-    DEALLOCATE PREPARE createTableStmt;
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `createTmpTable2` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `createTmpTable2`(out newTableName VARCHAR(255))
-BEGIN
-    call registerTempTableName(newTableName);
-    SET @createTableStmtStr = CONCAT('CREATE TEMPORARY TABLE `', newTableName,
-        '` ( id INT UNSIGNED, id2 INT UNSIGNED, domain INT UNSIGNED, CONSTRAINT `',
-        newTableName,'PK` PRIMARY KEY (id,id2,domain) )' );
-
-    PREPARE createTableStmt FROM @createTableStmtStr; 
-    EXECUTE createTableStmt;
-    DEALLOCATE PREPARE createTableStmt;
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `deleteEntity` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `deleteEntity`(in EntityID INT UNSIGNED)
-BEGIN
-
-    
-    DELETE FROM files where file_id=EntityID;
-
-    
-    DELETE FROM data_type
-        WHERE ( domain_id = 0
-            AND entity_id = 0
-            AND property_id = EntityID )
-        OR datatype = EntityID;
-    DELETE FROM collection_type
-        WHERE domain_id = 0
-        AND entity_id = 0
-        AND property_id = EntityID;
-
-    
-    DELETE FROM name_data
-        WHERE domain_id = 0
-        AND entity_id = EntityID
-        AND property_id = 20;
-
-    DELETE FROM entities where id=EntityID;
-
-    
-    DELETE FROM entity_acl
-        WHERE NOT EXISTS (
-            SELECT 1 FROM entities
-            WHERE entities.acl = entity_acl.id LIMIT 1)
-        AND NOT EXISTS (
-            SELECT 1 FROM archive_entities
-            WHERE archive_entities.acl = entity_acl.id LIMIT 1);
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `deleteEntityProperties` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `deleteEntityProperties`(in EntityID INT UNSIGNED)
-BEGIN
-    DECLARE IVersion INT UNSIGNED DEFAULT NULL;
-
-    CALL deleteIsa(EntityID);
-
-    IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN
-        SELECT max(e._iversion) INTO IVersion 
-            FROM entity_version AS e
-            WHERE e.entity_id = EntityID;
-
-        
-        INSERT INTO archive_reference_data (domain_id, entity_id,
-                property_id, value, value_iversion, status, pidx, _iversion)
-            SELECT domain_id, entity_id, property_id, value, value_iversion,
-                status, pidx, IVersion AS _iversion
-            FROM reference_data
-            WHERE (domain_id = 0 AND entity_id = EntityID)
-            OR domain_id = EntityID;
-
-        INSERT INTO archive_null_data (domain_id, entity_id,
-                property_id, status, pidx, _iversion)
-            SELECT domain_id, entity_id, property_id, status,
-                pidx, IVersion AS _iversion
-            FROM null_data
-            WHERE (domain_id = 0 AND entity_id = EntityID)
-            OR domain_id = EntityID;
-
-        INSERT INTO archive_text_data (domain_id, entity_id,
-                property_id, value, status, pidx, _iversion)
-            SELECT domain_id, entity_id, property_id, value, status,
-                pidx, IVersion AS _iversion
-            FROM text_data
-            WHERE (domain_id = 0 AND entity_id = EntityID)
-            OR domain_id = EntityID;
-
-        INSERT INTO archive_name_data (domain_id, entity_id,
-                property_id, value, status, pidx, _iversion)
-            SELECT domain_id, entity_id, property_id, value, status,
-                pidx, IVersion AS _iversion
-            FROM name_data
-            WHERE (domain_id = 0 AND entity_id = EntityID)
-            OR domain_id = EntityID;
-
-        INSERT INTO archive_enum_data (domain_id, entity_id,
-                property_id, value, status, pidx, _iversion)
-            SELECT domain_id, entity_id, property_id, value, status,
-                pidx, IVersion AS _iversion
-            FROM enum_data
-            WHERE (domain_id = 0 AND entity_id = EntityID)
-            OR domain_id = EntityID;
-
-        INSERT INTO archive_integer_data (domain_id, entity_id,
-                property_id, value, status, pidx, _iversion, unit_sig)
-            SELECT domain_id, entity_id, property_id, value, status,
-                pidx, IVersion AS _iversion, unit_sig
-            FROM integer_data
-            WHERE (domain_id = 0 AND entity_id = EntityID)
-            OR domain_id = EntityID;
-
-        INSERT INTO archive_double_data (domain_id, entity_id,
-                property_id, value, status, pidx, _iversion, unit_sig)
-            SELECT domain_id, entity_id, property_id, value, status,
-                pidx, IVersion AS _iversion, unit_sig
-            FROM double_data
-            WHERE (domain_id = 0 AND entity_id = EntityID)
-            OR domain_id = EntityID;
-
-        INSERT INTO archive_datetime_data (domain_id, entity_id,
-                property_id, value, value_ns, status, pidx, _iversion)
-            SELECT domain_id, entity_id, property_id, value, value_ns,
-                status, pidx, IVersion AS _iversion
-            FROM datetime_data
-            WHERE (domain_id = 0 AND entity_id = EntityID)
-            OR domain_id = EntityID;
-
-        INSERT INTO archive_date_data (domain_id, entity_id,
-                property_id, value, status, pidx, _iversion)
-            SELECT domain_id, entity_id, property_id, value, status,
-                pidx, IVersion AS _iversion
-            FROM date_data
-            WHERE (domain_id = 0 AND entity_id = EntityID)
-            OR domain_id = EntityID;
-
-        INSERT INTO archive_name_overrides (domain_id, entity_id,
-                property_id, name, _iversion)
-            SELECT domain_id, entity_id, property_id, name,
-                IVersion AS _iversion
-            FROM name_overrides
-            WHERE (domain_id = 0 AND entity_id = EntityID)
-            OR domain_id = EntityID;
-
-        INSERT INTO archive_desc_overrides (domain_id, entity_id,
-                property_id, description, _iversion)
-            SELECT domain_id, entity_id, property_id, description,
-                IVersion AS _iversion
-            FROM desc_overrides
-            WHERE (domain_id = 0 AND entity_id = EntityID)
-            OR domain_id = EntityID;
-
-        INSERT INTO archive_data_type (domain_id, entity_id,
-                property_id, datatype, _iversion)
-            SELECT domain_id, entity_id, property_id, datatype,
-                IVersion AS _iversion
-            FROM data_type
-            WHERE (domain_id = 0 AND entity_id = EntityID)
-            OR domain_id = EntityID;
-
-        INSERT INTO archive_collection_type (domain_id, entity_id,
-                property_id, collection, _iversion)
-            SELECT domain_id, entity_id, property_id, collection,
-                IVersion AS _iversion
-            FROM collection_type
-            WHERE (domain_id = 0 AND entity_id = EntityID)
-            OR domain_id = EntityID;
-
-        INSERT INTO archive_query_template_def (id, definition, _iversion)
-            SELECT id, definition, IVersion AS _iversion
-            FROM query_template_def
-            WHERE id = EntityID;
-
-    END IF;
-
-    DELETE FROM reference_data
-    where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
-    DELETE FROM null_data
-    where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
-    DELETE FROM text_data
-    where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
-    DELETE FROM name_data
-    where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
-    DELETE FROM enum_data
-    where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
-    DELETE FROM integer_data
-    where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
-    DELETE FROM double_data
-    where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
-    DELETE FROM datetime_data
-    where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
-    DELETE FROM date_data
-    where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
-
-    DELETE FROM name_overrides
-    WHERE (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
-    DELETE FROM desc_overrides
-    WHERE (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
-
-    DELETE FROM data_type
-    WHERE (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
-    DELETE FROM collection_type
-    WHERE (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
-
-    DELETE FROM query_template_def WHERE id=EntityID;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `deleteIsa` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `deleteIsa`(IN EntityID INT UNSIGNED)
-BEGIN
-    DECLARE IVersion INT UNSIGNED DEFAULT NULL;
-
-    IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN
-        SELECT max(_iversion) INTO IVersion
-            FROM entity_version
-            WHERE entity_id = EntityID;
-
-        
-        INSERT IGNORE INTO archive_isa (child, child_iversion, parent, direct)
-            SELECT e.child, IVersion AS child_iversion, e.parent, rpath = EntityID
-            FROM isa_cache AS e
-            WHERE e.child = EntityID;
-    END IF;
-
-    DELETE FROM isa_cache
-        WHERE child = EntityID
-        OR rpath = EntityID
-        OR rpath LIKE concat('%>', EntityID)
-        OR rpath LIKE concat('%>', EntityID, '>%');
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `deleteLinCon` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `deleteLinCon`(in sig BIGINT)
-BEGIN
-
-    DELETE FROM units_lin_con WHERE signature_from=sig;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `delete_all_entity_versions` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `delete_all_entity_versions`(
-    in EntityID INT UNSIGNED)
-BEGIN
-
-    DELETE FROM entity_version WHERE entity_id = EntityID;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `entityACL` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `entityACL`(out ACLID INT UNSIGNED, in ACLSTR VARBINARY(65525))
-BEGIN
-   SELECT id INTO ACLID FROM entity_acl as t WHERE t.acl=ACLSTR LIMIT 1;
-   IF ACLID IS NULL THEN
-        INSERT INTO entity_acl (acl) VALUES (ACLSTR);
-        SET ACLID = LAST_INSERT_ID();
-   END IF;
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `finishNegationFilter` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `finishNegationFilter`(in resultSetTable VARCHAR(255), in diff VARCHAR(255))
-BEGIN
-    call calcDifference(resultSetTable, diff);
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `finishSubProperty` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `finishSubProperty`(in sourceSet VARCHAR(255),in targetSet VARCHAR(255),
-                                          in list VARCHAR(255), in versioned BOOLEAN)
-BEGIN
-    DECLARE data VARCHAR(20000) DEFAULT CONCAT('`',list,'`');
-    SET @finishSubPropertyStmtStr = makeStmt(sourceSet, targetSet, data, NULL, versioned);
-
-    PREPARE finishSubPropertyStmt FROM @finishSubPropertyStmtStr;
-    EXECUTE finishSubPropertyStmt;
-    DEALLOCATE PREPARE finishSubPropertyStmt;
-
-    SELECT @finishSubPropertyStmtStr AS finishSubPropertyStmt;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `getChildren` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `getChildren`(in tableName varchar(255), in versioned BOOLEAN)
-BEGIN
-    DECLARE found_children INT UNSIGNED DEFAULT 0;
-
-    DROP TEMPORARY TABLE IF EXISTS dependTemp;
-    CREATE TEMPORARY TABLE dependTemp (id INT UNSIGNED, _iversion INT UNSIGNED, PRIMARY KEY(id, _iversion));
-
-
-    SET @initDepend = CONCAT(
-        'INSERT IGNORE INTO dependTemp (id, _iversion) SELECT i.child, ',
-        IF(versioned,
-            '_get_head_iversion(i.child)',
-            '0'),
-        ' FROM isa_cache AS i INNER JOIN `',
-        tableName,
-        '` AS t ON (i.parent=t.id);');
-    PREPARE initDependStmt FROM @initDepend;
-
-    EXECUTE initDependStmt;
-    SET found_children = found_children + ROW_COUNT();
-
-    
-
-    IF versioned IS TRUE THEN
-        SET @initDepend = CONCAT(
-            'INSERT IGNORE INTO dependTemp (id, _iversion) ',
-            'SELECT i.child, i.child_iversion FROM archive_isa AS i INNER JOIN `',
-            tableName,
-            '` AS t ON (i.parent=t.id);');
-        PREPARE initDependStmt FROM @initDepend;
-
-        EXECUTE initDependStmt;
-        SET found_children = found_children + ROW_COUNT();
-    END IF;
-
-    
-
-
-    IF found_children != 0 THEN
-        SET @transfer = CONCAT(
-            'INSERT IGNORE INTO `',
-            tableName,
-            IF(versioned,
-                '` (id, _iversion) SELECT id, _iversion FROM dependTemp',
-                '` (id) SELECT id FROM dependTemp'));
-        PREPARE transferstmt FROM @transfer;
-        EXECUTE transferstmt;
-        DEALLOCATE PREPARE transferstmt;
-    END IF;
-
-
-    DEALLOCATE PREPARE initDependStmt;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `getDependentEntities` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `getDependentEntities`(in EntityID INT UNSIGNED)
-BEGIN
-
-DROP TEMPORARY TABLE IF EXISTS refering;		
-CREATE TEMPORARY TABLE refering (
-id INT UNSIGNED UNIQUE
-);
-
-INSERT IGNORE INTO refering (id) SELECT entity_id FROM reference_data WHERE (value=EntityID OR property_id=EntityID) AND domain_id=0 AND entity_id!=EntityID;
-INSERT IGNORE INTO refering (id) SELECT domain_id FROM reference_data WHERE (value=EntityID OR property_id=EntityID) AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
-
-INSERT IGNORE INTO refering (id) SELECT entity_id FROM text_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
-INSERT IGNORE INTO refering (id) SELECT domain_id FROM text_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
-
-INSERT IGNORE INTO refering (id) SELECT entity_id FROM enum_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
-INSERT IGNORE INTO refering (id) SELECT domain_id FROM enum_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
-
-INSERT IGNORE INTO refering (id) SELECT entity_id FROM name_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
-INSERT IGNORE INTO refering (id) SELECT domain_id FROM name_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
-
-INSERT IGNORE INTO refering (id) SELECT entity_id FROM integer_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
-INSERT IGNORE INTO refering (id) SELECT domain_id FROM integer_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
-
-INSERT IGNORE INTO refering (id) SELECT entity_id FROM double_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
-INSERT IGNORE INTO refering (id) SELECT domain_id FROM double_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
-
-INSERT IGNORE INTO refering (id) SELECT entity_id FROM datetime_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
-INSERT IGNORE INTO refering (id) SELECT domain_id FROM datetime_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
-
-INSERT IGNORE INTO refering (id) SELECT entity_id FROM date_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
-INSERT IGNORE INTO refering (id) SELECT domain_id FROM date_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
-
-INSERT IGNORE INTO refering (id) SELECT entity_id FROM null_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
-INSERT IGNORE INTO refering (id) SELECT domain_id FROM null_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
-
-INSERT IGNORE INTO refering (id) SELECT entity_id from data_type WHERE datatype=EntityID AND domain_id=0 AND entity_id!=EntityID;
-INSERT IGNORE INTO refering (id) SELECT domain_id from data_type WHERE datatype=EntityID;
-
-
-Select id from refering WHERE id!=0 and id!=EntityID;
-
-DROP TEMPORARY TABLE refering;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `getFile` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8mb4 */ ;
-/*!50003 SET character_set_results = utf8mb4 */ ;
-/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `getFile`(in FileID INT)
-BEGIN 
-
-Select name, description, role into @name, @description, @role from entities where id=FileID LIMIT 1;
-
-IF @role = 'file' Then
-		Select path, hash, size into @FilePath, @FileHash, @FileSize from files where file_id=FileID LIMIT 1;
-		Select timestamp, user_id, user_agent into @FileCreated, @FileCreator, @FileGenerator from history where entity_id=FileID AND event='insertion' LIMIT 1;
-
-Select 
-FileID as FileID,
-@FilePath as FilePath,
-@FileSize as FileSize,
-@FileHash as FileHash,
-@FileDescription as FileDescription,
-@FileCreated as FileCreated,
-@FileCreator as FileCreator,
-@FileGenerator as FileGenerator,
-NULL	as FileOwner,
-NULL as FilePermission,
-NULL as FileChecksum;
-
-END IF;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `getFileIdByPath` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `getFileIdByPath`(in FilePath VARCHAR(255))
-BEGIN 
-
-Select file_id as FileID from files where path=FilePath LIMIT 1;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `getRole` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `getRole`(in RoleName VARCHAR(255))
-BEGIN
-
-Select e.id INTO @RoleID from entities e where e.name=RoleName AND e.role=RoleName LIMIT 1;
-
-call retrieveEntity(@RoleID);
-
-
-
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `getRules` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8mb4 */ ;
-/*!50003 SET character_set_results = utf8mb4 */ ;
-/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `getRules`(in DomainID INT UNSIGNED, in EntityID INT UNSIGNED, in TransType VARCHAR(255))
-BEGIN
-
-		
-		
-		
-SELECT rules.transaction, rules.criterion, rules.modus from rules where if(DomainID is null, rules.domain_id=0,rules.domain_id=DomainID) AND if(EntityID is null, rules.entity_id=0,rules.entity_id=EntityID) AND if(TransType is null,true=true,rules.transaction=TransType);
-
-
-
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `get_version_history` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `get_version_history`(
-    in EntityID INT UNSIGNED)
-BEGIN
-    
-    SELECT c.version AS child,
-            NULL as parent,
-            t.seconds AS child_seconds,
-            t.nanos AS child_nanos,
-            t.username AS child_username,
-            t.realm AS child_realm
-        FROM entity_version AS c INNER JOIN transactions as t
-        ON ( c.srid = t.srid )
-        WHERE c.entity_id = EntityID
-        AND c._ipparent is Null
-
-    
-    
-    
-
-    
-    UNION SELECT c.version AS child,
-            p.version AS parent,
-            t.seconds AS child_seconds,
-            t.nanos AS child_nanos,
-            t.username AS child_username,
-            t.realm AS child_realm
-        FROM entity_version AS p
-            INNER JOIN entity_version as c
-            INNER JOIN transactions AS t
-            ON (c._ipparent = p._iversion
-                AND c.entity_id = p.entity_id
-                AND t.srid = c.srid)
-        WHERE p.entity_id = EntityID;
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `initAutoIncrement` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `initAutoIncrement`()
-BEGIN
-
-    SELECT @max := MAX(entity_id)+ 1 FROM transaction_log; 
-    IF @max IS NOT NULL THEN
-        SET @stmtStr = CONCAT('ALTER TABLE entities AUTO_INCREMENT=',@max);
-        PREPARE stmt FROM @stmtStr;
-        EXECUTE stmt;
-        DEALLOCATE PREPARE stmt;
-    END IF;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `initBackReference` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `initBackReference`(in pid INT UNSIGNED, in pname VARCHAR(255), in entity_id INT UNSIGNED, in ename VARCHAR(255))
-BEGIN
-    DECLARE propertiesTable VARCHAR(255) DEFAULT NULL;
-    DECLARE entitiesTable VARCHAR(255) DEFAULT NULL;
-
-    IF pname IS NOT NULL THEN
-        
-        call createTmpTable(propertiesTable, FALSE);
-        call initSubEntity(pid, pname, propertiesTable);
-    END IF;
-
-    IF ename IS NOT NULL THEN
-        
-        call createTmpTable(entitiesTable, FALSE);
-        call initSubEntity(entity_id, ename, entitiesTable);
-    END IF;
-
-    SELECT propertiesTable, entitiesTable;
-
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `initDisjunctionFilter` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `initDisjunctionFilter`(in versioned BOOLEAN)
-BEGIN
-    call initEmptyTargetSet(NULL, versioned);
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `initEmptyTargetSet` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `initEmptyTargetSet`(in targetSet VARCHAR(255), in versioned BOOLEAN)
-BEGIN
-    DECLARE newTableName VARCHAR(255) DEFAULT targetSet;
-    IF targetSet IS NOT NULL THEN
-        SET @isNotEmptyVar = NULL; 
-        SET @isEmptyStmtStr = CONCAT("SELECT 1 INTO @isNotEmptyVar FROM `",targetSet,"` LIMIT 1");
-        PREPARE stmtIsNotEmpty FROM @isEmptyStmtStr;
-        EXECUTE stmtIsNotEmpty;
-        DEALLOCATE PREPARE stmtIsNotEmpty;
-        IF @isNotEmptyVar IS NOT NULL THEN 
-            call createTmpTable(newTableName, versioned);
-        END IF;
-    ELSE
-        call createTmpTable(newTableName, versioned);
-    END IF;
-    SELECT newTableName AS newTableName;
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `initEntity` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `initEntity`(in eid INT UNSIGNED, in ename VARCHAR(255),
-                                   in enameLike VARCHAR(255), in enameRegexp VARCHAR(255),
-                                   in resultset VARCHAR(255), in versioned BOOLEAN)
-initEntityLabel: BEGIN
-    DECLARE select_columns VARCHAR(255) DEFAULT '` (id) SELECT entity_id FROM name_data ';
-    SET @initEntityStmtStr = NULL;
-
-    
-    
-    IF versioned IS TRUE THEN
-        SET select_columns = '` (id, _iversion) SELECT entity_id, _get_head_iversion(entity_id) FROM name_data ';
-    END IF;
-    IF ename IS NOT NULL THEN
-        SET @initEntityStmtStr = CONCAT(
-            'INSERT IGNORE INTO `',
-            resultset,
-            select_columns,
-            'WHERE value=?; ');
-        SET @query_param = ename;
-    ELSEIF enameLike IS NOT NULL THEN
-        SET @initEntityStmtStr = CONCAT(
-            'INSERT IGNORE INTO `',
-            resultset,
-            select_columns,
-            'WHERE value LIKE ?;');
-        SET @query_param = enameLike;
-    ELSEIF enameRegexp IS NOT NULL THEN 
-        SET @initEntityStmtStr = CONCAT(
-            'INSERT IGNORE INTO `',
-            resultset,
-            select_columns,
-            'WHERE value REGEXP ?;');
-        SET @query_param = enameRegexp;
-    END IF;
-
-    
-    IF @initEntityStmtStr IS NOT NULL THEN
-        PREPARE initEntityStmt FROM @initEntityStmtStr;
-        EXECUTE initEntityStmt USING @query_param;
-        DEALLOCATE PREPARE initEntityStmt;
-    END IF;
-
-    IF eid IS NOT NULL THEN
-        
-        SET @initEntityStmtStr = CONCAT(
-            'INSERT IGNORE INTO `',
-            resultset,
-            IF(versioned,
-                '` (id, _iversion) SELECT id, _get_head_iversion(id) ',
-                '` (id) SELECT id '),
-            'FROM entities WHERE id=',eid,';');
-        PREPARE initEntityStmt FROM @initEntityStmtStr;
-        EXECUTE initEntityStmt;
-        DEALLOCATE PREPARE initEntityStmt;
-    END IF;
-
-
-    
-    
-    IF versioned IS TRUE THEN
-        SET select_columns = '` (id, _iversion) SELECT entity_id, _iversion FROM archive_name_data ';
-        IF ename IS NOT NULL THEN
-            SET @initEntityStmtStr = CONCAT(
-                'INSERT IGNORE INTO `',
-                resultset,
-                select_columns,
-                'WHERE value=?; ');
-            SET @query_param = ename;
-        ELSEIF enameLike IS NOT NULL THEN
-            SET @initEntityStmtStr = CONCAT(
-                'INSERT IGNORE INTO `',
-                resultset,
-                select_columns,
-                'WHERE value LIKE ?;');
-            SET @query_param = enameLike;
-        ELSEIF enameRegexp IS NOT NULL THEN
-            SET @initEntityStmtStr = CONCAT(
-                'INSERT IGNORE INTO `',
-                resultset,
-                'WHERE value REGEXP ?;');
-            SET @query_param = enameRegexp;
-        END IF;
-
-        
-        IF @initEntityStmtStr IS NOT NULL THEN
-            PREPARE initEntityStmt FROM @initEntityStmtStr;
-            EXECUTE initEntityStmt USING @query_param;
-            DEALLOCATE PREPARE initEntityStmt;
-        END IF;
-    END IF;
-    
-
-
-    IF @initEntityStmtStr IS NOT NULL THEN
-        call getChildren(resultset, versioned);
-    END IF;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `initPOVPropertiesTable` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `initPOVPropertiesTable`(in pid INT UNSIGNED, in pname VARCHAR(255), in sourceSet VARCHAR(255))
-BEGIN
-    DECLARE propertiesTable VARCHAR(255) DEFAULT NULL; 
-    DECLARE replTbl VARCHAR(255) DEFAULT NULL;
-    DECLARE ecount INT DEFAULT 0;
-    DECLARE t1 BIGINT DEFAULT 0;
-    DECLARE t2 BIGINT DEFAULT 0;
-    DECLARE t3 BIGINT DEFAULT 0;
-    DECLARE t4 BIGINT DEFAULT 0;
-    DECLARE t5 BIGINT DEFAULT 0;
-    DECLARE t6 BIGINT DEFAULT 0;
-
-
-    IF pname is NOT NULL THEN 
-        SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t1 from (select uuid() uid) as alias;
-        call createTmpTable2(propertiesTable);
-
-        
-        SET @initPOVPropertiesTableStmt1 = CONCAT('INSERT IGNORE INTO `', propertiesTable, '` (id, id2, domain) SELECT property_id, entity_id, domain_id from name_overrides WHERE name = ? UNION ALL SELECT entity_id, domain_id, 0 FROM name_data WHERE value = ?;');
-        PREPARE stmt FROM @initPOVPropertiesTableStmt1;
-        SET @pname = pname;
-        EXECUTE stmt USING @pname, @pname;
-        SET ecount = ROW_COUNT();
-
-        
-        SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t2 from (select uuid() uid) as alias;
-        IF pid IS NOT NULL THEN
-            SET @initPOVPropertiesTableStmt2 = CONCAT('INSERT IGNORE INTO `', propertiesTable, '` (id, id2, domain) VALUES (?, 0, 0)');
-            PREPARE stmt FROM @initPOVPropertiesTableStmt2;
-            SET @pid = pid;
-            EXECUTE stmt USING @pid;
-            SET ecount = ecount + ROW_COUNT();
-        END IF;
-
-        
-        SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t3 from (select uuid() uid) as alias;
-        IF ecount > 0 THEN
-            
-            call getChildren(propertiesTable, False);
-        END IF;
-
-        
-        SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t4 from (select uuid() uid) as alias;
-        IF ecount > 0 THEN
-            call createTmpTable2(replTbl);
-            SET @replTblStmt1 := CONCAT('INSERT IGNORE INTO `',replTbl, '` (id, id2, domain) SELECT r.value as id, r.entity_id as id2, 0 as domain_id FROM reference_data AS r WHERE status="REPLACEMENT" AND domain_id=0 AND EXISTS (SELECT * FROM `', sourceSet, '` AS s WHERE s.id=r.entity_id) AND EXISTS (SELECT * FROM `', propertiesTable, '` AS p WHERE p.domain = 0 AND p.id2=0 AND p.id=r.property_id);');
-            PREPARE replStmt1 FROM @replTblStmt1;
-            EXECUTE replStmt1;
-            DEALLOCATE PREPARE replStmt1;
-            SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t5 from (select uuid() uid) as alias;
-
-            SET @replTblStmt2 := CONCAT('INSERT IGNORE INTO `', propertiesTable, '` SELECT id, id2, domain FROM `', replTbl, '`;');
-            PREPARE replStmt2 FROM @replTblStmt2;
-            EXECUTE replStmt2;
-            DEALLOCATE PREPARE replStmt2;
-            SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t6 from (select uuid() uid) as alias;
-        END IF;
-    END IF;
-    SELECT propertiesTable, t1, t2, t3, t4, t5, t6, @initPOVPropertiesTableStmt1 as initPOVPropertiesTableStmt1, @initPOVPropertiesTableStmt2 as initPOVPropertiesTableStmt2, @replTblStmt1 as replTblStmt1, @replTblStmt2 as replTblStmt2;
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `initPOVRefidsTable` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `initPOVRefidsTable`(in vInt INT UNSIGNED, in vText VARCHAR(255))
-BEGIN
-    DECLARE refIdsTable VARCHAR(255) DEFAULT NULL; 
-
-    
-    IF vText IS NOT NULL THEN
-        
-        call createTmpTable(refIdsTable, FALSE);
-        call initSubEntity(vInt, vText, refIdsTable);
-        
-    END IF;
-    SELECT refIdsTable;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `initQuery` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `initQuery`(in versioned BOOLEAN)
-BEGIN
-    CREATE TEMPORARY TABLE IF NOT EXISTS warnings (warning TEXT NOT NULL);
-
-    call createTmpTable(@resultSet, versioned);
-    SELECT @resultSet as tablename;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `initSubEntity` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `initSubEntity`(in e_id INT UNSIGNED, in ename VARCHAR(255), in tableName VARCHAR(255))
-BEGIN
-    DECLARE ecount INT DEFAULT 0;
-    DECLARE op VARCHAR(255) DEFAULT '=';
-
-    IF LOCATE("%", ename) > 0 THEN
-        SET op = "LIKE";
-    END IF;
-
-    SET @stmtStr = CONCAT('INSERT IGNORE INTO `',
-        tableName,
-        '` (id) SELECT entity_id FROM name_data WHERE value ',
-        op,
-        ' ? AND domain_id=0;');
-
-    PREPARE stmt FROM @stmtStr;
-    SET @ename = ename;
-    EXECUTE stmt USING @ename;
-    SET ecount = ROW_COUNT();
-    DEALLOCATE PREPARE stmt;
-
-    IF e_id IS NOT NULL THEN
-        SET @stmtStr = CONCAT('INSERT IGNORE INTO `', tableName, '` (id) VALUES (', e_id, ')');
-        PREPARE stmt FROM @stmtStr;
-        EXECUTE stmt;
-        SET ecount = ecount + ROW_COUNT();
-        DEALLOCATE PREPARE stmt;
-    END IF;
-
-    IF ecount > 0 THEN
-        
-        call getChildren(tableName, False);
-    END IF;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `initSubProperty` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `initSubProperty`(in sourceSet VARCHAR(255), in propertiesTable VARCHAR(255), in refIdsTable VARCHAR(255))
-BEGIN
-DECLARE newTableName VARCHAR(255) DEFAULT NULL;
-    call registerTempTableName(newTableName);	
-    
-    SET @createSubPropertyListTableStr = CONCAT('CREATE TEMPORARY TABLE `', newTableName,'` ( entity_id INT UNSIGNED NOT NULL, id INT UNSIGNED NOT NULL, domain INT UNSIGNED NOT NULL, CONSTRAINT `',newTableName,'PK` PRIMARY KEY (entity_id, id, domain)) ' );
-    
-    PREPARE createSubPropertyListTable FROM @createSubPropertyListTableStr; 
-    EXECUTE createSubPropertyListTable;
-    DEALLOCATE PREPARE createSubPropertyListTable;
-
-	SET @subResultSetStmtStr = CONCAT('INSERT IGNORE INTO `', newTableName, '` (domain, entity_id, id) 
-            SELECT data1.domain_id as domain, data1.entity_id as entity_id, data1.value as id 
-                FROM reference_data as data1 JOIN reference_data as data2 
-                    ON (data1.domain_id=0 
-                        AND data1.domain_id=data2.domain_id 
-                        AND data2.entity_id=data1.entity_id 
-                        AND (
-                            (data1.property_id=data2.value AND data2.status="REPLACEMENT")
-                            OR
-                            (data1.property_id!=data2.value AND data2.status!="REPLACEMENT" AND data1.status!="REPLACEMENT" AND data1.property_id=data2.property_id)
-                        )
-                        AND EXISTS (SELECT 1 FROM `', sourceSet, '` as source WHERE source.id=data1.entity_id LIMIT 1)',
-                        IF(propertiesTable IS NULL, '', CONCAT(' AND EXISTS (SELECT 1 FROM `', propertiesTable, '` as props WHERE props.id=data2.property_id LIMIT 1)')),
-                        IF(refIdsTable IS NULL, '', CONCAT(' AND EXISTS (SELECT 1 FROM `', refIdsTable, '` as refs WHERE refs.id=data1.value LIMIT 1)')),	
-		')'
-        );
-
-
-	PREPARE subResultSetStmt FROM @subResultSetStmtStr;
-	EXECUTE subResultSetStmt;
-    DEALLOCATE PREPARE subResultSetStmt;
-
-	SELECT newTableName as list;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `insertEntity` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `insertEntity`(in EntityName VARCHAR(255), in EntityDesc TEXT, in EntityRole VARCHAR(255), in ACL VARBINARY(65525))
-BEGIN
-    DECLARE NewEntityID INT UNSIGNED DEFAULT NULL;
-    DECLARE NewACLID INT UNSIGNED DEFAULT NULL;
-    DECLARE Hash VARBINARY(255) DEFAULT NULL;
-    DECLARE Version VARBINARY(255) DEFAULT NULL;
-    DECLARE Transaction VARBINARY(255) DEFAULT NULL;
-
-    
-    
-    call entityACL(NewACLID, ACL);
-
-    
-    INSERT INTO entities (description, role, acl)
-        VALUES (EntityDesc, EntityRole, NewACLID);
-
-    
-    SET NewEntityID = LAST_INSERT_ID();
-
-    IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN
-        
-        SET Transaction = @SRID;
-        SET Version = SHA1(UUID());
-        CALL insert_single_child_version(NewEntityID, Hash, Version, Null, Transaction);
-    END IF;
-
-    
-    
-    IF EntityName IS NOT NULL THEN
-        INSERT INTO name_data
-            (domain_id, entity_id, property_id, value, status, pidx)
-            VALUES (0, NewEntityID, 20, EntityName, "FIX", 0);
-    END IF;
-
-    SELECT NewEntityID as EntityID, Version as Version;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `insertEntityProperty` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `insertEntityProperty`(
-    in DomainID INT UNSIGNED,
-    in EntityID INT UNSIGNED,
-    in PropertyID INT UNSIGNED,
-    in Datatable VARCHAR(255),
-    in PropertyValue TEXT,
-    in PropertyUnitSig BIGINT,
-    in PropertyStatus VARCHAR(255),
-    in NameOverride VARCHAR(255),
-    in DescOverride TEXT,
-    in DatatypeOverride INT UNSIGNED,
-    in Collection VARCHAR(255),
-    in PropertyIndex INT UNSIGNED)
-BEGIN
-    DECLARE ReferenceValueIVersion INT UNSIGNED DEFAULT NULL;
-    DECLARE ReferenceValue INT UNSIGNED DEFAULT NULL;
-    DECLARE AT_PRESENT INTEGER DEFAULT NULL;
-
-    CASE Datatable
-    WHEN 'double_data' THEN
-        INSERT INTO double_data
-        (domain_id, entity_id, property_id, value, unit_sig, status, pidx)
-        VALUES
-        (DomainID, EntityID, PropertyID, PropertyValue, PropertyUnitSig, PropertyStatus, PropertyIndex);
-    WHEN 'integer_data' THEN
-        INSERT INTO integer_data
-        (domain_id, entity_id, property_id, value, unit_sig, status, pidx)
-        VALUES
-        (DomainID, EntityID, PropertyID, PropertyValue, PropertyUnitSig, PropertyStatus, PropertyIndex);
-    WHEN 'datetime_data' THEN
-        INSERT INTO datetime_data
-        (domain_id, entity_id, property_id, value, value_ns, status, pidx)
-        VALUES
-        (DomainID, EntityID, PropertyID, SUBSTRING_INDEX(PropertyValue, 'UTC', 1), IF(SUBSTRING_INDEX(PropertyValue, 'UTC', -1)='',NULL,SUBSTRING_INDEX(PropertyValue, 'UTC', -1)), PropertyStatus, PropertyIndex);
-    WHEN 'reference_data' THEN
-
-        
-        SET AT_PRESENT=LOCATE("@", PropertyValue);
-        IF is_feature_config("ENTITY_VERSIONING", "ENABLED") AND AT_PRESENT > 0 THEN
-            SET ReferenceValue = SUBSTRING_INDEX(PropertyValue, '@', 1);
-            SET ReferenceValueIVersion = get_iversion(ReferenceValue,
-                SUBSTRING_INDEX(PropertyValue, '@', -1));
-            
-            IF ReferenceValueIVersion IS NULL THEN
-                SELECT 0 from `ReferenceValueIVersion_WAS_NULL`;
-            END IF;
-
-        ELSE
-            SET ReferenceValue = PropertyValue;
-        END IF;
-
-        INSERT INTO reference_data
-            (domain_id, entity_id, property_id, value, value_iversion, status,
-                pidx)
-        VALUES
-            (DomainID, EntityID, PropertyID, ReferenceValue,
-                ReferenceValueIVersion, PropertyStatus, PropertyIndex);
-    WHEN 'enum_data' THEN
-        INSERT INTO enum_data
-        (domain_id, entity_id, property_id, value, status, pidx)
-        VALUES
-        (DomainID, EntityID, PropertyID, PropertyValue, PropertyStatus, PropertyIndex);
-    WHEN 'date_data' THEN
-        INSERT INTO date_data
-        (domain_id, entity_id, property_id, value, status, pidx)
-        VALUES
-        (DomainID, EntityID, PropertyID, SUBSTRING_INDEX(PropertyValue, '.', 1), PropertyStatus, PropertyIndex);
-    WHEN 'text_data' THEN
-        INSERT INTO text_data
-        (domain_id, entity_id, property_id, value, status, pidx)
-        VALUES
-        (DomainID, EntityID, PropertyID, PropertyValue, PropertyStatus, PropertyIndex);
-    WHEN 'null_data' THEN
-        INSERT INTO null_data
-        (domain_id, entity_id, property_id, status, pidx)
-        VALUES
-        (DomainID, EntityID, PropertyID, PropertyStatus, PropertyIndex);
-    WHEN 'name_data' THEN
-        INSERT INTO name_data
-        (domain_id, entity_id, property_id, value, status, pidx)
-        VALUES
-        (DomainID, EntityID, PropertyID, PropertyValue, PropertyStatus, PropertyIndex);
-
-    ELSE
-        SELECT * FROM table_does_not_exist;
-    END CASE;
-
-    IF DatatypeOverride IS NOT NULL THEN
-        call overrideType(DomainID, EntityID, PropertyID, DatatypeOverride);
-        IF Collection IS NOT NULL THEN
-            INSERT INTO collection_type (domain_id, entity_id, property_id, collection) VALUES (DomainID, EntityID, PropertyID, Collection);
-        END IF;
-    END IF;
-
-    IF NameOverride IS NOT NULL THEN
-        call overrideName(DomainID, EntityID, PropertyID, NameOverride);
-    END IF;
-
-    IF DescOverride IS NOT NULL THEN
-        call overrideDesc(DomainID, EntityID, PropertyID, DescOverride);
-    END IF;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `insertIsa` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `insertIsa`(IN c INT UNSIGNED, IN p INT UNSIGNED)
-insert_is_a_proc: BEGIN
-
-    INSERT INTO isa_cache (child, parent, rpath) VALUES (c, p, c);
-
-    IF p = c THEN
-        
-        LEAVE insert_is_a_proc;
-    END IF;
-    
-
-    
-    
-    
-    INSERT IGNORE INTO isa_cache SELECT
-        c
-            AS child,   
-        i.parent
-            AS parent,  
-        IF(p=i.rpath or i.rpath=parent,  
-           p,                            
-           concat(p, ">", i.rpath))      
-            AS rpath
-        FROM isa_cache AS i WHERE i.child = p AND i.child != i.parent;  
-
-    
-    
-    INSERT IGNORE INTO isa_cache SELECT
-        l.child,    
-        r.parent,   
-        if(l.rpath=l.child and r.rpath=c,  
-           c,                              
-           concat(if(l.rpath=l.child,        
-                     c,                         
-                     concat(l.rpath, '>', c)),  
-                  if(r.rpath=c,              
-                     '',                        
-                     concat('>', r.rpath))))    
-            AS rpath
-        FROM
-            isa_cache as l INNER JOIN isa_cache as r
-            ON (l.parent = c AND c = r.child AND l.child != l.parent); 
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `insertLinCon` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `insertLinCon`(in signature_from BIGINT, in signature_to BIGINT, in a DECIMAL(65,30), in b_dividend BIGINT, in b_divisor BIGINT, in c DECIMAL(65,30))
-BEGIN
-
-    INSERT IGNORE INTO units_lin_con (signature_from, signature_to, a, b_dividend, b_divisor, c) VALUES (signature_from, signature_to, a, b_dividend, b_divisor, c);
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `insertUser` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `insertUser`(in Name VARCHAR(255), in Password VARCHAR(255))
-BEGIN 
-
-
-INSERT INTO entities (name, role, acl) VALUES (Name, 'USER', 0);
-
-SET @LAST_UserID = LAST_INSERT_ID();
-
-INSERT INTO passwords VALUES (@LAST_UserID, Password);
-
-Select @LAST_UserID as UserID; 
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `insert_single_child_version` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `insert_single_child_version`(
-    in EntityID INT UNSIGNED,
-    in Hash VARBINARY(255),
-    in Version VARBINARY(255),
-    in Parent VARBINARY(255),
-    in Transaction VARBINARY(255))
-BEGIN
-    DECLARE newiversion INT UNSIGNED DEFAULT NULL;
-    DECLARE newipparent INT UNSIGNED DEFAULT NULL;
-
-    
-    IF Parent IS NOT NULL THEN
-        SELECT e._iversion INTO newipparent
-            FROM entity_version AS e
-            WHERE e.entity_id = EntityID
-            AND e.version = Parent;
-        IF newipparent IS NULL THEN
-            
-            SELECT concat("This parent does not exists: ", Parent)
-            FROM nonexisting;
-        END IF;
-    END IF;
-
-
-    
-    SELECT max(e._iversion)+1 INTO newiversion
-        FROM entity_version AS e
-        WHERE e.entity_id=EntityID;
-    IF newiversion IS NULL THEN
-        SET newiversion = 1;
-    END IF;
-
-    INSERT INTO entity_version
-        (entity_id, hash, version, _iversion, _ipparent, srid)
-        VALUES
-        (EntityID, Hash, Version, newiversion, newipparent, Transaction);
-
-
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `intersectTable` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `intersectTable`(in resultSetTable VARCHAR(255), in diff VARCHAR(255))
-BEGIN
-    SET @diffStmtStr = CONCAT('DELETE FROM `', resultSetTable, '` WHERE id NOT IN ( SELECT id FROM `', diff,'`)');
-    PREPARE diffStmt FROM @diffStmtStr;
-    EXECUTE diffStmt;
-
-    
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `isSubtype` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `isSubtype`(in c INT UNSIGNED, in p INT UNSIGNED)
-BEGIN
-	DECLARE ret BOOLEAN DEFAULT FALSE;
-	SELECT TRUE INTO ret FROM isa_cache AS i WHERE i.child=c AND i.parent=p LIMIT 1;
-    SELECT ret as ISA;
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `overrideDesc` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `overrideDesc`(in DomainID INT UNSIGNED, in EntityID INT UNSIGNED, in PropertyID INT UNSIGNED, in Description TEXT)
-BEGIN
-	INSERT INTO desc_overrides (domain_id, entity_id, property_id, description) VALUES (DomainID, EntityID, PropertyID, Description);
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `overrideName` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `overrideName`(in DomainID INT UNSIGNED, in EntityID INT UNSIGNED, in PropertyID INT UNSIGNED, in Name VARCHAR(255))
-BEGIN
-	INSERT INTO name_overrides (domain_id, entity_id, property_id, name) VALUES (DomainID, EntityID, PropertyID, Name);
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `overrideType` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `overrideType`(in DomainID INT UNSIGNED, in EntityID INT UNSIGNED, in PropertyID INT UNSIGNED, in Datatype INT UNSIGNED)
-BEGIN
-	INSERT INTO data_type (domain_id, entity_id, property_id, datatype) VALUES (DomainID, EntityID, PropertyID, Datatype);
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `raiseWarning` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `raiseWarning`(in str VARCHAR(20000))
-BEGIN
-    INSERT INTO warnings VALUES (str);
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `registerSubdomain` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `registerSubdomain`(in amount INT UNSIGNED)
-BEGIN
-    DECLARE ED INTEGER DEFAULT NULL;
-
-    SELECT COUNT(id) INTO ED FROM entities WHERE Role='DOMAIN' AND id!=0;
-
-    WHILE ED < amount DO
-        INSERT INTO entities (description, role, acl) VALUES
-            (NULL, 'DOMAIN', 0);
-        SET ED = ED + 1;
-    END WHILE;
-
-    SELECT id as DomainID FROM entities WHERE Role='DOMAIN' and id!=0;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `registerTempTableName` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `registerTempTableName`(out newTableName VARCHAR(255))
-BEGIN
-    SET newTableName = md5(CONCAT(RAND(),CURRENT_TIMESTAMP()));
-    SET @tempTableList = IF(@tempTableList IS NULL,
-        CONCAT('`',newTableName,'`'),
-        CONCAT(@tempTableList, ',`', newTableName, '`')
-    );
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `reset_stats` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `reset_stats`()
-BEGIN
-	truncate table performance_schema.events_statements_summary_by_digest;
-	truncate table performance_schema.events_statements_history_long;
-  END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `retrieveDatatype` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `retrieveDatatype`(in DatatypeName VARCHAR(255))
-BEGIN
-
-Select e.id INTO @DatatypeID from entities e where e.name=DatatypeName AND e.role='DATATYPE' LIMIT 1;
-
-call retrieveEntity(@DatatypeID);
-
-
-
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `retrieveEntity` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `retrieveEntity`(
-    in EntityID INT UNSIGNED,
-    in Version VARBINARY(255))
-retrieveEntityBody: BEGIN
-    DECLARE FilePath VARCHAR(255) DEFAULT NULL;
-    DECLARE FileSize VARCHAR(255) DEFAULT NULL;
-    DECLARE FileHash VARCHAR(255) DEFAULT NULL;
-    DECLARE DatatypeID INT UNSIGNED DEFAULT NULL;
-    DECLARE CollectionName VARCHAR(255) DEFAULT NULL;
-    DECLARE IsHead BOOLEAN DEFAULT TRUE;
-    DECLARE IVersion INT UNSIGNED DEFAULT NULL;
-
-
-    IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN
-        
-        IF Version IS NULL OR UPPER(Version) = "HEAD" THEN
-            SET Version = get_head_version(EntityID);
-        ELSEIF UPPER(LEFT(Version, 5)) = "HEAD~" THEN
-            SET IsHead = FALSE;
-            SET Version = get_head_relative(EntityID, SUBSTR(Version, 6));
-        ELSE
-            SELECT get_head_version(EntityID) = Version INTO IsHead;
-        END IF;
-
-        IF IsHead IS FALSE THEN
-            SET IVersion=get_iversion(EntityID, Version);
-
-            IF IVersion IS NULL THEN
-                
-                SELECT 0 FROM entities WHERE 0 = 1;
-                LEAVE retrieveEntityBody;
-            END IF;
-
-            SELECT path, size, HEX(hash)
-                INTO FilePath, FileSize, FileHash
-                FROM archive_files
-                WHERE file_id = EntityID
-                AND _iversion = IVersion
-                LIMIT 1;
-
-            SELECT datatype
-                INTO DatatypeID
-                FROM archive_data_type
-                WHERE domain_id = 0
-                AND entity_id = 0
-                AND property_id = EntityID
-                AND _iversion = IVersion
-                LIMIT 1;
-
-            SELECT collection
-                INTO CollectionName
-                FROM archive_collection_type
-                WHERE domain_id = 0
-                AND entity_id = 0
-                AND property_id = EntityID
-                AND _iversion = IVersion
-                LIMIT 1;
-
-            
-            SELECT
-                ( SELECT value FROM
-                    ( SELECT value FROM name_data
-                        WHERE domain_id = 0
-                        AND entity_ID = DatatypeID
-                        AND property_id = 20
-                        UNION SELECT DatatypeID AS value
-                    ) AS tmp LIMIT 1 ) AS Datatype,
-                CollectionName AS Collection,
-                EntityID AS EntityID,
-                ( SELECT value FROM archive_name_data
-                    WHERE domain_id = 0
-                    AND entity_ID = EntityID
-                    AND property_id = 20
-                    AND _iversion = IVersion
-                    
-                    ) AS EntityName,
-                e.description AS EntityDesc,
-                e.role AS EntityRole,
-                FileSize AS FileSize,
-                FilePath AS FilePath,
-                FileHash AS FileHash,
-                (SELECT acl FROM entity_acl AS a WHERE a.id = e.acl) AS ACL,
-                Version AS Version
-            FROM archive_entities AS e
-            WHERE e.id = EntityID
-            AND e._iversion = IVersion
-            LIMIT 1;
-
-            
-            LEAVE retrieveEntityBody;
-
-        END IF;
-    END IF;
-
-    SELECT path, size, hex(hash)
-        INTO FilePath, FileSize, FileHash
-        FROM files
-        WHERE file_id = EntityID
-        LIMIT 1;
-
-    SELECT datatype INTO DatatypeID
-        FROM data_type
-        WHERE domain_id=0
-        AND entity_id=0
-        AND property_id=EntityID
-        LIMIT 1;
-
-    SELECT collection INTO CollectionName
-        FROM collection_type
-        WHERE domain_id=0
-        AND entity_id=0
-        AND property_id=EntityID
-        LIMIT 1;
-
-    SELECT
-        ( SELECT value FROM name_data
-            WHERE domain_id = 0
-            AND entity_ID = DatatypeID
-            AND property_id = 20 LIMIT 1 ) AS Datatype,
-        CollectionName AS Collection,
-        EntityID AS EntityID,
-        ( SELECT value FROM name_data
-            WHERE domain_id = 0
-            AND entity_ID = EntityID
-            AND property_id = 20 LIMIT 1) AS EntityName,
-        e.description AS EntityDesc,
-        e.role AS EntityRole,
-        FileSize AS FileSize,
-        FilePath AS FilePath,
-        FileHash AS FileHash,
-        (SELECT acl FROM entity_acl AS a WHERE a.id = e.acl) AS ACL,
-        Version AS Version
-    FROM entities e WHERE id = EntityID LIMIT 1;
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `retrieveEntityParents` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `retrieveEntityParents`(
-    in EntityID INT UNSIGNED,
-    in Version VARBINARY(255))
-retrieveEntityParentsBody: BEGIN
-
-    DECLARE IVersion INT UNSIGNED DEFAULT NULL;
-    DECLARE IsHead BOOLEAN DEFAULT TRUE;
-
-    IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN
-        IF Version IS NOT NULL THEN
-            SELECT get_head_version(EntityID) = Version INTO IsHead;
-        END IF;
-
-        IF IsHead IS FALSE THEN
-            SELECT e._iversion INTO IVersion
-                FROM entity_version as e
-                WHERE e.entity_id = EntityID
-                AND e.version = Version;
-
-            IF IVersion IS NULL THEN
-                
-                LEAVE retrieveEntityParentsBody;
-            END IF;
-
-            SELECT
-                i.parent AS ParentID,
-                ( SELECT value FROM name_data
-                    WHERE domain_id = 0
-                    AND entity_id = ParentID
-                    AND property_id = 20
-                ) AS ParentName, 
-                                 
-                                 
-                                 
-                                 
-                e.description AS ParentDescription,
-                e.role AS ParentRole,
-                (SELECT acl FROM entity_acl AS a WHERE a.id = e.acl) AS ACL
-                FROM archive_isa AS i JOIN entities AS e
-                    ON (i.parent = e.id)
-                WHERE i.child = EntityID
-                AND i.child_iversion = IVersion
-                AND i.direct IS TRUE
-                ;
-
-            LEAVE retrieveEntityParentsBody;
-        END IF;
-    END IF;
-
-    SELECT
-        i.parent AS ParentID,
-        ( SELECT value FROM name_data
-            WHERE domain_id = 0
-            AND entity_id = ParentID
-            AND property_id = 20 ) AS ParentName,
-        e.description AS ParentDescription,
-        e.role AS ParentRole,
-        (SELECT acl FROM entity_acl AS a WHERE a.id = e.acl) AS ACL
-        FROM isa_cache AS i JOIN entities AS e
-            ON (i.parent = e.id)
-        WHERE i.child = EntityID
-        AND i.rpath = EntityID;
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `retrieveEntityProperties` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `retrieveEntityProperties`(
-    in DomainID INT UNSIGNED,
-    in EntityID INT UNSIGNED,
-    in Version VARBINARY(255))
-retrieveEntityPropertiesBody: BEGIN
-
-    DECLARE IVersion INT UNSIGNED DEFAULT NULL;
-    DECLARE IsHead BOOLEAN DEFAULT TRUE;
-
-    IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN
-        IF Version IS NOT NULL THEN
-            IF DomainID = 0 THEN
-                SELECT get_head_version(EntityID) = Version INTO IsHead;
-            ELSE
-                SELECT get_head_version(DomainID) = Version INTO IsHead;
-            END IF;
-
-        END IF;
-
-        IF IsHead IS FALSE THEN
-            SELECT e._iversion INTO IVersion
-                FROM entity_version as e
-                WHERE ((e.entity_id = EntityID AND DomainID = 0)
-                    OR (e.entity_id = DomainID))
-                AND e.version = Version;
-
-            IF IVersion IS NULL THEN
-                
-                LEAVE retrieveEntityPropertiesBody;
-            END IF;
-
-            
-            SELECT
-                property_id AS PropertyID,
-                value AS PropertyValue,
-                status AS PropertyStatus,
-                pidx AS PropertyIndex
-            FROM archive_double_data
-            WHERE domain_id = DomainID
-            AND entity_id = EntityID
-            AND _iversion = IVersion
-
-            UNION ALL
-
-            
-            SELECT
-                property_id AS PropertyID,
-                value AS PropertyValue,
-                status AS PropertyStatus,
-                pidx AS PropertyIndex
-            FROM archive_integer_data
-            WHERE domain_id = DomainID
-            AND entity_id = EntityID
-            AND _iversion = IVersion
-
-            UNION ALL
-
-            
-            SELECT
-                property_id AS PropertyID,
-                CONCAT(value, '.NULL.NULL') AS PropertyValue,
-                status AS PropertyStatus,
-                pidx AS PropertyIndex
-            FROM archive_date_data
-            WHERE domain_id = DomainID
-            AND entity_id = EntityID
-            AND _iversion = IVersion
-
-            UNION ALL
-
-            
-            SELECT
-                property_id AS PropertyID,
-                CONCAT(value, 'UTC', IF(value_ns IS NULL, '', value_ns))
-                    AS PropertyValue,
-                status AS PropertyStatus,
-                pidx AS PropertyIndex
-            FROM archive_datetime_data
-            WHERE domain_id = DomainID
-            AND entity_id = EntityID
-            AND _iversion = IVersion
-
-            UNION ALL
-
-            
-            SELECT
-                property_id AS PropertyID,
-                value AS PropertyValue,
-                status AS PropertyStatus,
-                pidx AS PropertyIndex
-            FROM archive_text_data
-            WHERE domain_id = DomainID
-            AND entity_id = EntityID
-            AND _iversion = IVersion
-
-            UNION ALL
-
-            
-            SELECT
-                property_id AS PropertyID,
-                value AS PropertyValue,
-                status AS PropertyStatus,
-                pidx AS PropertyIndex
-            FROM archive_enum_data
-            WHERE domain_id = DomainID
-            AND entity_id = EntityID
-            AND _iversion = IVersion
-
-            UNION ALL
-
-            
-            SELECT
-                property_id AS PropertyID,
-                IF(value_iversion IS NULL, value,
-                    
-                    CONCAT(value, "@", _get_version(value, value_iversion)))
-                    AS PropertyValue,
-                status AS PropertyStatus,
-                pidx AS PropertyIndex
-            FROM archive_reference_data
-            WHERE domain_id = DomainID
-            AND entity_id = EntityID
-            AND _iversion = IVersion
-
-            UNION ALL
-
-            
-            SELECT
-                property_id AS PropertyID,
-                NULL AS PropertyValue,
-                status AS PropertyStatus,
-                pidx AS PropertyIndex
-            FROM archive_null_data
-            WHERE domain_id = DomainID
-            AND entity_id = EntityID
-            AND _iversion = IVersion
-
-            UNION ALL
-
-            
-            SELECT
-                property_id AS PropertyID,
-                value AS PropertyValue,
-                status AS PropertyStatus,
-                pidx AS PropertyIndex
-            FROM archive_name_data
-            WHERE domain_id = DomainID
-            AND entity_id = EntityID
-            AND property_id != 20
-            AND _iversion = IVersion;
-
-            LEAVE retrieveEntityPropertiesBody;
-        END IF;
-    END IF;
-
-    
-    SELECT
-        property_id AS PropertyID,
-        value AS PropertyValue,
-        status AS PropertyStatus,
-        pidx AS PropertyIndex
-    FROM double_data
-    WHERE domain_id = DomainID
-    AND entity_id = EntityID
-
-    UNION ALL
-
-    
-    SELECT
-        property_id AS PropertyID,
-        value AS PropertyValue,
-        status AS PropertyStatus,
-        pidx AS PropertyIndex
-    FROM integer_data
-    WHERE domain_id = DomainID
-    AND entity_id = EntityID
-
-    UNION ALL
-
-    
-    SELECT
-        property_id AS PropertyID,
-        CONCAT(value, '.NULL.NULL') AS PropertyValue,
-        status AS PropertyStatus,
-        pidx AS PropertyIndex
-    FROM date_data
-    WHERE domain_id = DomainID
-    AND entity_id = EntityID
-
-    UNION ALL
-
-    
-    SELECT
-        property_id AS PropertyID,
-        CONCAT(value, 'UTC', IF(value_ns IS NULL, '', value_ns))
-            AS PropertyValue,
-        status AS PropertyStatus,
-        pidx AS PropertyIndex
-    FROM datetime_data
-    WHERE domain_id = DomainID
-    AND entity_id = EntityID
-
-    UNION ALL
-
-    
-    SELECT
-        property_id AS PropertyID,
-        value AS PropertyValue,
-        status AS PropertyStatus,
-        pidx AS PropertyIndex
-    FROM text_data
-    WHERE domain_id = DomainID
-    AND entity_id = EntityID
-
-    UNION ALL
-
-    
-    SELECT
-        property_id AS PropertyID,
-        value AS PropertyValue,
-        status AS PropertyStatus,
-        pidx AS PropertyIndex
-    FROM enum_data
-    WHERE domain_id = DomainID
-    AND entity_id = EntityID
-
-    UNION ALL
-
-    
-    SELECT
-        property_id AS PropertyID,
-        IF(value_iversion IS NULL, value,
-            CONCAT(value, "@", _get_version(value, value_iversion)))
-            AS PropertyValue,
-        status AS PropertyStatus,
-        pidx AS PropertyIndex
-    FROM reference_data
-    WHERE domain_id = DomainID
-    AND entity_id = EntityID
-
-    UNION ALL
-
-    
-    SELECT
-        property_id AS PropertyID,
-        NULL AS PropertyValue,
-        status AS PropertyStatus,
-        pidx AS PropertyIndex
-    FROM null_data
-    WHERE domain_id = DomainID
-    AND entity_id = EntityID
-
-    UNION ALL
-
-    
-    SELECT
-        property_id AS PropertyID,
-        value AS PropertyValue,
-        status AS PropertyStatus,
-        pidx AS PropertyIndex
-    FROM name_data
-    WHERE domain_id = DomainID
-    AND entity_id = EntityID
-    AND property_id != 20;
-
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `retrieveOverrides` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `retrieveOverrides`(
-    in DomainID INT UNSIGNED,
-    in EntityID INT UNSIGNED,
-    in Version VARBINARY(255))
-retrieveOverridesBody: BEGIN
-
-    DECLARE IVersion INT UNSIGNED DEFAULT NULL;
-    DECLARE IsHead BOOLEAN DEFAULT TRUE;
-
-    IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN
-        IF Version IS NOT NULL THEN
-            IF DomainID = 0 THEN
-                SELECT get_head_version(EntityID) = Version INTO IsHead;
-            ELSE
-                SELECT get_head_version(DomainID) = Version INTO IsHead;
-            END IF;
-        END IF;
-
-        IF IsHead IS FALSE THEN
-            SELECT e._iversion INTO IVersion
-                FROM entity_version as e
-                WHERE ((e.entity_id = EntityID AND DomainID = 0)
-                    OR (e.entity_id = DomainID))
-                AND e.version = Version;
-
-            IF IVersion IS NULL THEN
-                
-                LEAVE retrieveOverridesBody;
-            END IF;
-
-            
-            SELECT
-                NULL AS collection_override,
-                name AS name_override,
-                NULL AS desc_override,
-                NULL AS type_override,
-                entity_id,
-                property_id
-            FROM archive_name_overrides
-            WHERE domain_id = DomainID
-            AND entity_id = EntityID
-            AND _iversion = IVersion
-
-            UNION ALL
-
-            
-            SELECT
-                NULL AS collection_override,
-                NULL AS name_override,
-                description AS desc_override,
-                NULL AS type_override,
-                entity_id,
-                property_id
-            FROM archive_desc_overrides
-            WHERE domain_id = DomainID
-            AND entity_id = EntityID
-            AND _iversion = IVersion
-
-            UNION ALL
-
-            
-            SELECT
-                NULL AS collection_override,
-                NULL AS name_override,
-                NULL AS desc_override,
-                IFNULL((SELECT value FROM name_data
-                    WHERE domain_id = 0
-                    AND entity_id = datatype
-                    AND property_id = 20
-                    LIMIT 1), datatype) AS type_override,
-                entity_id,
-                property_id
-            FROM archive_data_type
-            WHERE domain_id = DomainID
-            AND entity_id = EntityID
-            AND _iversion = IVersion
-
-            UNION ALL
-
-            
-            SELECT
-                collection AS collection_override,
-                NULL AS name_override,
-                NULL AS desc_override,
-                NULL AS type_override,
-                entity_id,
-                property_id
-            FROM archive_collection_type
-            WHERE domain_id = DomainID
-            AND entity_id = EntityID
-            AND _iversion = IVersion;
-
-            LEAVE retrieveOverridesBody;
-        END IF;
-    END IF;
-
-    SELECT
-        NULL AS collection_override,
-        name AS name_override,
-        NULL AS desc_override,
-        NULL AS type_override,
-        entity_id,
-        property_id
-    FROM name_overrides
-    WHERE domain_id = DomainID
-    AND entity_id = EntityID
-
-    UNION ALL
-
-    SELECT
-        NULL AS collection_override,
-        NULL AS name_override,
-        description AS desc_override,
-        NULL AS type_override,
-        entity_id,
-        property_id
-    FROM desc_overrides
-    WHERE domain_id = DomainID
-    AND entity_id = EntityID
-
-    UNION ALL
-
-    SELECT
-        NULL AS collection_override,
-        NULL AS name_override,
-        NULL AS desc_override,
-        IFNULL((SELECT value FROM name_data
-            WHERE domain_id = 0
-            AND entity_ID = datatype
-            AND property_id = 20 LIMIT 1), datatype) AS type_override,
-        entity_id,
-        property_id
-    FROM data_type
-    WHERE domain_id = DomainID
-    AND entity_id = EntityID
-
-    UNION ALL
-
-    SELECT
-        collection AS collection_override,
-        NULL AS name_override,
-        NULL AS desc_override,
-        NULL AS type_override,
-        entity_id,
-        property_id
-    FROM collection_type
-    WHERE domain_id = DomainID
-    AND entity_id = EntityID;
-
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `retrieveQueryTemplateDef` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `retrieveQueryTemplateDef`(
-    in EntityID INT UNSIGNED,
-    in Version VARBINARY(255))
-retrieveQueryTemplateDefBody: BEGIN
-
-    DECLARE IVersion INT UNSIGNED DEFAULT NULL;
-    DECLARE IsHead BOOLEAN DEFAULT TRUE;
-
-    IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN
-        
-        IF Version IS NOT NULL THEN
-            SELECT get_head_version(EntityID) = Version INTO IsHead;
-        END IF;
-
-        IF IsHead IS FALSE THEN
-            
-            SELECT e._iversion INTO IVersion
-                FROM entity_version as e
-                WHERE e.entity_id = EntityID
-                AND e.version = Version;
-
-            IF IVersion IS NULL THEN
-                
-                LEAVE retrieveQueryTemplateDefBody;
-            END IF;
-
-            SELECT definition
-            FROM archive_query_template_def
-            WHERE id = EntityID
-            AND _iversion = IVersion;
-
-            LEAVE retrieveQueryTemplateDefBody;
-        END IF;
-    END IF;
-
-    SELECT definition
-    FROM query_template_def
-    WHERE id = EntityID;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `setFileProperties` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `setFileProperties`(
-    in EntityID INT UNSIGNED,
-    in FilePath TEXT,
-    in FileSize BIGINT UNSIGNED,
-    in FileHash VARCHAR(255)
-)
-BEGIN
-    DECLARE IVersion INT UNSIGNED DEFAULT NULL;
-    IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN
-        SELECT max(e._iversion) INTO IVersion
-            FROM entity_version AS e
-            WHERE e.entity_id = EntityID;
-
-        INSERT INTO archive_files (file_id, path, size, hash,
-                _iversion)
-            SELECT file_id, path, size, hash, IVersion AS _iversion
-            FROM files
-            WHERE file_id = EntityID;
-    END IF;
-
-    DELETE FROM files WHERE file_id = EntityID;
-
-    IF FilePath IS NOT NULL THEN
-        INSERT INTO files (file_id, path, size, hash)
-            VALUES (EntityID, FilePath, FileSize, unhex(FileHash));
-    END IF;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `setPassword` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `setPassword`(in EntityID INT UNSIGNED, in NewPassword VARCHAR(255))
-BEGIN
-
-
-	DELETE FROM passwords where entity_id=EntityID;
-	INSERT INTO passwords (entity_id, password) VALUES (EntityID, NewPassword);
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `set_transaction` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `set_transaction`(
-    srid VARBINARY(255),
-    username VARCHAR(255),
-    realm VARCHAR(255),
-    seconds BIGINT UNSIGNED,
-    nanos INT(10) UNSIGNED)
-BEGIN
-
-    SET @SRID = srid;  
-    INSERT INTO transactions (srid, username, realm, seconds, nanos)
-        VALUES (srid, username, realm, seconds, nanos);
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `showEntityAutoIncr` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `showEntityAutoIncr`()
-BEGIN
-SELECT `AUTO_INCREMENT`
-FROM  INFORMATION_SCHEMA.TABLES
-WHERE TABLE_SCHEMA = 'caosdb'
-AND   TABLE_NAME   = 'entities';
-
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `updateEntity` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `updateEntity`(
-    in EntityID INT UNSIGNED,
-    in EntityName VARCHAR(255),
-    in EntityDescription TEXT,
-    in EntityRole VARCHAR(255),
-    in Datatype VARCHAR(255),
-    in Collection VARCHAR(255),
-    in ACL VARBINARY(65525))
-BEGIN
-    DECLARE ACLID INT UNSIGNED DEFAULT NULL;
-    DECLARE Hash VARBINARY(255) DEFAULT NULL;
-    DECLARE Version VARBINARY(255) DEFAULT SHA1(UUID());
-    DECLARE ParentVersion VARBINARY(255) DEFAULT NULL;
-    DECLARE Transaction VARBINARY(255) DEFAULT NULL;
-    DECLARE OldIVersion INT UNSIGNED DEFAULT NULL;
-
-    call entityACL(ACLID, ACL);
-
-    IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN
-        SELECT max(_iversion) INTO OldIVersion
-            FROM entity_version
-            WHERE entity_id = EntityID;
-
-        
-        INSERT INTO archive_entities (id, description, role,
-                acl, _iversion)
-            SELECT e.id, e.description, e.role, e.acl, OldIVersion
-            FROM entities AS e
-            WHERE e.id = EntityID;
-
-        INSERT INTO archive_data_type (domain_id, entity_id, property_id,
-                datatype, _iversion)
-            SELECT e.domain_id, e.entity_id, e.property_id, e.datatype,
-                OldIVersion
-            FROM data_type AS e
-            WHERE e.domain_id = 0
-            AND e.entity_id = 0
-            AND e.property_id = EntityID;
-
-        INSERT INTO archive_collection_type (domain_id, entity_id, property_id,
-                collection, _iversion)
-            SELECT e.domain_id, e.entity_id, e.property_id, e.collection,
-                OldIVersion
-            FROM collection_type as e
-            WHERE e.domain_id = 0
-            AND e.entity_id = 0
-            AND e.property_id = EntityID;
-
-
-        SET Transaction = @SRID;
-        SELECT e.version INTO ParentVersion
-            FROM entity_version as e
-            WHERE e.entity_id = EntityID
-            AND e._iversion = OldIVersion;
-        CALL insert_single_child_version(
-            EntityID, Hash, Version,
-            ParentVersion, Transaction);
-    END IF;
-
-    UPDATE entities e
-        SET e.description = EntityDescription,
-            e.role=EntityRole,
-            e.acl = ACLID
-        WHERE e.id = EntityID;
-
-    
-    
-    DELETE FROM name_data
-        WHERE domain_id = 0 AND entity_id = EntityID AND property_id = 20;
-    IF EntityName IS NOT NULL THEN
-        INSERT INTO name_data
-                (domain_id, entity_id, property_id, value, status, pidx)
-            VALUES (0, EntityID, 20, EntityName, "FIX", 0);
-    END IF;
-
-    DELETE FROM data_type
-        WHERE domain_id=0 AND entity_id=0 AND property_id=EntityID;
-
-    DELETE FROM collection_type
-        WHERE domain_id=0 AND entity_id=0 AND property_id=EntityID;
-
-    IF Datatype IS NOT NULL THEN
-        INSERT INTO data_type (domain_id, entity_id, property_id, datatype)
-            SELECT 0, 0, EntityID,
-                ( SELECT entity_id FROM name_data WHERE domain_id = 0
-                    AND property_id = 20 AND value = Datatype LIMIT 1 );
-
-        IF Collection IS NOT NULL THEN
-            INSERT INTO collection_type (domain_id, entity_id, property_id,
-                    collection)
-                SELECT 0, 0, EntityID, Collection;
-        END IF;
-    END IF;
-
-    Select Version as Version;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!50003 DROP PROCEDURE IF EXISTS `updateLinCon` */;
-/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
-/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
-/*!50003 SET @saved_col_connection = @@collation_connection */ ;
-/*!50003 SET character_set_client  = utf8 */ ;
-/*!50003 SET character_set_results = utf8 */ ;
-/*!50003 SET collation_connection  = utf8_general_ci */ ;
-/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
-/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
-DELIMITER ;;
-CREATE DEFINER=`caosdb`@`%` PROCEDURE `updateLinCon`(in sig_from BIGINT, in sig_to BIGINT, in new_a DECIMAL(65,30), in new_b_dividend BIGINT, in new_b_divisor BIGINT, in new_c DECIMAL(65,30))
-BEGIN
-    UPDATE units_lin_con SET signature_to=sig_to, a=new_a, b_dividend=new_b_dividend, b_divisor=new_b_divisor, c=new_c where signature_from=sig_from;
-
-END ;;
-DELIMITER ;
-/*!50003 SET sql_mode              = @saved_sql_mode */ ;
-/*!50003 SET character_set_client  = @saved_cs_client */ ;
-/*!50003 SET character_set_results = @saved_cs_results */ ;
-/*!50003 SET collation_connection  = @saved_col_connection */ ;
-/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
-
-/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
-/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
-/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
-/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
-/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
-/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-
--- Dump completed on 2022-01-20  8:27:49
diff --git a/integrationtests/test-profile/custom/other/restore/caosroot.2022-01-20T09271642667269.tar.gz b/integrationtests/test-profile/custom/other/restore/caosroot.2022-01-20T09271642667269.tar.gz
deleted file mode 100644
index 4f6809d8067ecf58fd449567a8339fc3f498e651..0000000000000000000000000000000000000000
Binary files a/integrationtests/test-profile/custom/other/restore/caosroot.2022-01-20T09271642667269.tar.gz and /dev/null differ
diff --git a/integrationtests/test-profile/profile.yml b/integrationtests/test-profile/profile.yml
index 4cbbe28c87d1146910479e13a23fa7c20933a926..f830a2fbe6c6a4ae35362676db310f3eadf6f4cc 100644
--- a/integrationtests/test-profile/profile.yml
+++ b/integrationtests/test-profile/profile.yml
@@ -21,7 +21,8 @@ default:
     # extroot: From where files are copied/symlinked.  This is a
     # list of `NAME: PATH` pairs or a single path.  
     extroot:
-      "": "paths/extroot"
+    #  "": "paths/extroot"
+      "": "../test_data/extroot/"
     #
     #  "base": "/path/to/base/dir"
     #  "other": "/path/to/other"
diff --git a/integrationtests/test_data/extroot/realworld_example/data/35/03_raw_data/001_dataset1/metadata.json b/integrationtests/test_data/extroot/realworld_example/data/35/03_raw_data/001_dataset1/metadata.json
index 64df90e55eff065b1cc249a634444a72f9fd00d2..9b81cc094bf7d1c35154d8f092a96d5f5fae35c9 100644
--- a/integrationtests/test_data/extroot/realworld_example/data/35/03_raw_data/001_dataset1/metadata.json
+++ b/integrationtests/test_data/extroot/realworld_example/data/35/03_raw_data/001_dataset1/metadata.json
@@ -1,5 +1,5 @@
 {
-        "title": "Random numbers created on a random autumn day in a random office",
+        "title": "Random numbers created on a random autumn day in a random person's office",
         "abstract": "For demonstration purposes we created random numbers on a computer in an office of the CLOUD. This demonstration dataset is used in the DataCloud, a curated cloud storage for scientific data.",
         "Event": [
             {
diff --git a/integrationtests/test_data/extroot/realworld_example/dataset_cfoods.yml b/integrationtests/test_data/extroot/realworld_example/dataset_cfoods.yml
index 1589cba2b44afc3e2645b0ee72f91bf83b327032..eaf2690ae130cb61c8a74452e3e4e1d4fd06846a 100644
--- a/integrationtests/test_data/extroot/realworld_example/dataset_cfoods.yml
+++ b/integrationtests/test_data/extroot/realworld_example/dataset_cfoods.yml
@@ -318,6 +318,13 @@ Data:
                         Dataset:
                           Project: $Project
                       subtree:
+                        name_element:
+                          type: DictTextElement
+                          match_name: "name"
+                          match_value: "(?P<name>.*)"
+                          records:
+                            Project:
+                              name: $name
                         full_name_element:
                           type: DictTextElement
                           match_name: "full_name"
diff --git a/integrationtests/test_data/extroot/realworld_example/identifiables.yml b/integrationtests/test_data/extroot/realworld_example/identifiables.yml
new file mode 100644
index 0000000000000000000000000000000000000000..0ea0265ecfec05392c599457d81339bc91ba18d0
--- /dev/null
+++ b/integrationtests/test_data/extroot/realworld_example/identifiables.yml
@@ -0,0 +1,22 @@
+license:
+  - name
+project_type:
+  - name
+Keyword:
+  - name
+Taxon:
+  - name
+Person:
+  - email
+  # - full_name
+Dataset:
+  - title
+  # - DOI
+Event:
+  - longitude
+  - latitude
+  - start_datetime
+Dataspace:
+  - dataspace_id
+Project:
+  - name
diff --git a/integrationtests/test_data/extroot/realworld_example/pycaosdb.ini b/integrationtests/test_data/extroot/realworld_example/pycaosdb.ini
new file mode 120000
index 0000000000000000000000000000000000000000..bc443439d842f18ce05e002e5f6b95d37ca22747
--- /dev/null
+++ b/integrationtests/test_data/extroot/realworld_example/pycaosdb.ini
@@ -0,0 +1 @@
+../../../pycaosdb.ini
\ No newline at end of file
diff --git a/integrationtests/test_data/extroot/realworld_example/schema/zmt-organisation.yml b/integrationtests/test_data/extroot/realworld_example/schema/organisation.yml
similarity index 100%
rename from integrationtests/test_data/extroot/realworld_example/schema/zmt-organisation.yml
rename to integrationtests/test_data/extroot/realworld_example/schema/organisation.yml
diff --git a/integrationtests/test_data/extroot/use_case_simple_presentation/DataAnalysis/results.md b/integrationtests/test_data/extroot/use_case_simple_presentation/DataAnalysis/results.md
new file mode 100644
index 0000000000000000000000000000000000000000..b867d778942ce5595286870bd6a92e53015be0e8
--- /dev/null
+++ b/integrationtests/test_data/extroot/use_case_simple_presentation/DataAnalysis/results.md
@@ -0,0 +1,8 @@
+---
+identifier: test analysis
+date: 2022-03-16
+source_identifier: crawlertest
+source_date: 2022-03-16
+
+frequency: 17
+---
diff --git a/integrationtests/test_data/extroot/use_case_simple_presentation/ExperimentalData/data.md b/integrationtests/test_data/extroot/use_case_simple_presentation/ExperimentalData/data.md
new file mode 100644
index 0000000000000000000000000000000000000000..60dcd78ed1f70428b18e8762a14dc3fe7f3fa5cd
--- /dev/null
+++ b/integrationtests/test_data/extroot/use_case_simple_presentation/ExperimentalData/data.md
@@ -0,0 +1,5 @@
+---
+date: "2022-03-16"
+identifier: crawlertest
+alpha: 16
+---
diff --git a/integrationtests/test_data/extroot/use_case_simple_presentation/cfood.yml b/integrationtests/test_data/extroot/use_case_simple_presentation/cfood.yml
new file mode 100644
index 0000000000000000000000000000000000000000..6495e1828dc56e99459c162f7751951f880ea55c
--- /dev/null
+++ b/integrationtests/test_data/extroot/use_case_simple_presentation/cfood.yml
@@ -0,0 +1,117 @@
+# This is only a scifolder test cfood with a limited functionality.
+# The full scifolder cfood will be developed here:
+# https://gitlab.indiscale.com/caosdb/src/crawler-cfoods/scifolder-cfood
+
+Definitions:
+  type: Definitions
+  #include "description.yml"
+
+Converters: {}
+
+extroot:
+  type: Directory
+  match: use_case_simple_presentation
+  subtree:
+    ExperimentalData:  # name of the converter
+      type: Directory
+      match: ExperimentalData
+      subtree:
+        DataFile:
+          type: MarkdownFile
+          match: ^data\.md$
+
+          records:
+            mdfile:
+              parents:
+                - mdfile
+              role: File
+              path: $DataFile
+              file: $DataFile
+
+            Experiment:
+              mdfile: $mdfile
+
+
+          subtree:
+            date:
+              type: DictTextElement
+              match_name: date
+              match_value: (?P<date>.+)
+              records:
+                Experiment:
+                  date: $date
+            identifier:
+              type: DictTextElement
+              match_name: identifier
+              match_value: (?P<identifier>.+)
+              records:
+                Experiment:
+                  identifier: $identifier
+            parameter_alpha:
+              type: DictTextElement
+              match_name: alpha
+              match_value: (?P<alpha>[0-9]+)
+              records:
+                Experiment:
+                  alpha: $alpha
+
+    DataAnalysis:
+      type: Directory
+      match: DataAnalysis
+      subtree:
+        DataFile:
+          type: MarkdownFile
+          match: ^results\.md$
+
+          records:
+            mdfile:
+              parents:
+                - mdfile
+              role: File
+              path: $DataFile
+              file: $DataFile
+
+            Experiment: {}
+
+            DataAnalysis:
+              mdfile: $mdfile
+              sources: +$Experiment
+
+          subtree:
+            date:
+              type: DictTextElement
+              match_name: date
+              match_value: (?P<date>.+)
+              records:
+                DataAnalysis:
+                  date: $date
+            identifier:
+              type: DictTextElement
+              match_name: identifier
+              match_value: (?P<identifier>.+)
+              records:
+                DataAnalysis:
+                  identifier: $identifier
+
+            frequency:
+              type: DictTextElement
+              match_name: frequency
+              match_value: (?P<frequency>[0-9]+)
+              records:
+                DataAnalysis:
+                  frequency: $frequency
+
+            source_date:
+              type: DictTextElement
+              match_name: source_date
+              match_value: (?P<source_date>.+)
+              records:
+                Experiment:
+                  date: $source_date
+            source_identifier:
+              type: DictTextElement
+              match_name: source_identifier
+              match_value: (?P<source_identifier>.+)
+              records:
+                Experiment:
+                  identifier: $source_identifier
diff --git a/integrationtests/test_data/extroot/use_case_simple_presentation/identifiables.yml b/integrationtests/test_data/extroot/use_case_simple_presentation/identifiables.yml
new file mode 100644
index 0000000000000000000000000000000000000000..94b593bfb4c425ce71a4f94504d4f0033538cacb
--- /dev/null
+++ b/integrationtests/test_data/extroot/use_case_simple_presentation/identifiables.yml
@@ -0,0 +1,6 @@
+Experiment:
+- date
+- identifier
+DataAnalysis:
+- date
+- identifier
diff --git a/integrationtests/test_data/extroot/use_case_simple_presentation/model.yml b/integrationtests/test_data/extroot/use_case_simple_presentation/model.yml
new file mode 100644
index 0000000000000000000000000000000000000000..bcf041c9586841ef9c61b9aef62574985c2be471
--- /dev/null
+++ b/integrationtests/test_data/extroot/use_case_simple_presentation/model.yml
@@ -0,0 +1,41 @@
+
+
+
+ScientificActivity:
+  description: |
+    The base record type for all scientific activities, like experiments,
+    data analysis records, simulations or publications.
+  recommended_properties:
+    sources:
+      description: This scientific activity is based on the activity referenced here.
+      datatype: LIST<ScientificActivity>
+    date:
+      description: The date according to https://doi.org/10.3390/data5020043
+      datatype: DATETIME
+    identifier:
+      description: An identifier according to https://doi.org/10.3390/data5020043
+      datatype: TEXT
+    mdfile:
+      description: The file storing information about this record.
+
+Experiment:
+  description: |
+    The base record type for all records containing data from experiments.
+  inherit_from_obligatory:
+    - ScientificActivity
+  obligatory_properties:
+    alpha:
+      description: A ficticious piece of data.
+      datatype: DOUBLE
+      unit: km
+
+DataAnalysis:
+  description: |
+    The base record type for all records containing results from data analysis.
+  inherit_from_obligatory:
+    - ScientificActivity
+  recommended_properties:
+    frequency:
+      description: A ficticious piece of data.
+      datatype: DOUBLE
+      unit: Hz
diff --git a/integrationtests/test_realworld_example.py b/integrationtests/test_realworld_example.py
new file mode 100644
index 0000000000000000000000000000000000000000..da3fb69ce635ae69cd33cbf01de9df8ebf019661
--- /dev/null
+++ b/integrationtests/test_realworld_example.py
@@ -0,0 +1,210 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2022 Indiscale GmbH <info@indiscale.com>
+# Copyright (C) 2022 Henrik tom Wörden <h.tomwoerden@indiscale.com>
+# Copyright (C) 2022 Florian Spreckelsen <f.spreckelsen@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+
+"""
+module description
+"""
+import json
+import os
+
+import caosdb as db
+
+from caoscrawler.crawl import Crawler, crawler_main
+from caoscrawler.converters import JSONFileConverter, DictConverter
+from caoscrawler.identifiable_adapters import CaosDBIdentifiableAdapter
+from caoscrawler.structure_elements import File, JSONFile, Directory
+import pytest
+from caosadvancedtools.models.parser import parse_model_from_json_schema, parse_model_from_yaml
+
+#from caosadvancedtools.testutils import clear_database, set_test_key
+import sys
+
+# TODO is not yet merged in caosadvancedtools
+# set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
+
+
+def rfp(*pathcomponents):
+    """
+    Return full path.
+    Shorthand convenience function.
+    """
+    return os.path.join(os.path.dirname(__file__), *pathcomponents)
+
+
+DATADIR = rfp("test_data", "extroot", "realworld_example")
+
+
+@pytest.fixture
+def usemodel():
+    # First load dataspace data model
+    dataspace_definitions = parse_model_from_json_schema(
+        os.path.join(DATADIR, "schema", "dataspace.schema.json"))
+    dataspace_definitions.sync_data_model(noquestion=True)
+
+    # Then general dataset definitions
+    dataset_definitions = parse_model_from_json_schema(
+        os.path.join(DATADIR, "schema", "dataset.schema.json"))
+    dataset_definitions.sync_data_model(noquestion=True)
+
+    # Finally, add inheritances as defined in yaml
+    dataset_inherits = parse_model_from_yaml(
+        os.path.join(DATADIR, "schema", "dataset-inheritance.yml"))
+    dataset_inherits.sync_data_model(noquestion=True)
+
+
+@pytest.fixture
+def clear_database():
+    # TODO(fspreck): Remove once the corresponding advancedtools function can
+    # be used.
+    ents = db.execute_query("FIND ENTITY WITH ID>99")
+    if ents:
+        ents.delete()
+
+
+def create_identifiable_adapter():
+    ident = CaosDBIdentifiableAdapter()
+    ident.register_identifiable("license", (
+        db.RecordType()
+        .add_parent("license")
+        .add_property("name")))
+    ident.register_identifiable("project_type", (
+        db.RecordType()
+        .add_parent("project_type")
+        .add_property("name")))
+    ident.register_identifiable("Person", (
+        db.RecordType()
+        .add_parent("Person")
+        .add_property("full_name")))
+
+    return ident
+
+
+def test_dataset(clear_database, usemodel):
+    ident = create_identifiable_adapter()
+    crawler = Crawler(identifiableAdapter=ident)
+    crawler_definition = crawler.load_definition(
+        os.path.join(DATADIR, "dataset_cfoods.yml"))
+    # print(json.dumps(crawler_definition, indent=3))
+    # Load and register converter packages:
+    converter_registry = crawler.load_converters(crawler_definition)
+    # print("DictIntegerElement" in converter_registry)
+
+    records = crawler.start_crawling(
+        Directory("data", os.path.join(DATADIR, 'data')),
+        crawler_definition,
+        converter_registry
+    )
+    crawler.synchronize()
+
+    dataspace = db.execute_query("FIND RECORD Dataspace WITH name=35 AND dataspace_id=20002 AND "
+                                 "archived=FALSE AND url='https://datacloud.de/index.php/f/7679'"
+                                 " AND Person", unique=True)
+    assert dataspace.get_property("start_date").value == "2022-03-01"
+    db.execute_query("FIND RECORD Person with full_name='Max Schmitt' AND"
+                     " given_name='Max'", unique=True)
+
+    dataset = db.execute_query(f"FIND RECORD Dataset with Dataspace={dataspace.id} AND title="
+                               "'Random numbers created on a random autumn day in a random person\\'s office'"
+                               "", unique=True)
+    assert db.execute_query(f"COUNT RECORD with id={dataset.id} AND WHICH REFERENCES Person WITH full_name="
+                            "'Alexa Nozone' AND WHICH REFERENCES Person WITH full_name='Max Schmitt'"
+                            "") == 1
+    assert db.execute_query(f"COUNT RECORD with id={dataset.id} AND WHICH REFERENCES Event WITH "
+                            "start_datetime='2022-02-10T16:36:48+01:00'") == 1
+
+
+def test_event_update(clear_database, usemodel):
+
+    identifiable_path = os.path.join(DATADIR, "identifiables.yml")
+    crawler_definition_path = os.path.join(DATADIR, "dataset_cfoods.yml")
+
+    # TODO(fspreck): Use crawler_main
+    crawler_main(
+        os.path.join(DATADIR, 'data'),
+        crawler_definition_path,
+        identifiable_path,
+        True,
+        os.path.join(DATADIR, "provenance.yml"),
+        False,
+        ""
+    )
+
+    old_dataset_rec = db.execute_query(
+        "FIND RECORD Dataset WHICH HAS AN EVENT WITH location='Bremen, Germany'")
+    assert len(old_dataset_rec) == 1
+    old_dataset_rec = old_dataset_rec[0]
+    assert old_dataset_rec.get_property("Event").datatype == db.LIST("Event")
+    assert len(old_dataset_rec.get_property("Event").value) == 1
+    old_event_rec = db.Record(
+        id=old_dataset_rec.get_property("Event").value[0]).retrieve()
+
+    # TODO(fspreck): crawl again manually, edit the event records in the update
+    # list, synchronize, and test whether the events have been updated.
+    ident = CaosDBIdentifiableAdapter()
+    ident.load_from_yaml_definition(identifiable_path)
+
+    second_crawler = Crawler(identifiableAdapter=ident)
+    crawler_definition = second_crawler.load_definition(
+        crawler_definition_path)
+    converter_registry = second_crawler.load_converters(crawler_definition)
+    records = second_crawler.start_crawling(
+        Directory("data", os.path.join(DATADIR, "data")),
+        crawler_definition,
+        converter_registry
+    )
+
+    for rec in records:
+        if rec.parents[0].name == "Event":
+            rec.get_property("longitude").value = 0.0
+            rec.get_property("latitude").value = 0.0
+            rec.get_property("location").value = "Origin"
+        elif rec.parents[0].name == "Dataset":
+            rec.get_property("Event").value[0].get_property(
+                "longitude").value = 0.0
+            rec.get_property("Event").value[0].get_property(
+                "latitude").value = 0.0
+            rec.get_property("Event").value[0].get_property(
+                "location").value = "Origin"
+    second_crawler.synchronize()
+
+    # Dataset is still the same Record, but with an updated event
+    new_dataset_rec = db.Record(id=old_dataset_rec.id).retrieve()
+    for prop in old_dataset_rec.get_properties():
+        if not prop.name == "Event":
+            assert new_dataset_rec.get_property(
+                prop.name).datatype == prop.datatype
+            assert new_dataset_rec.get_property(
+                prop.name).value == prop.value
+    assert new_dataset_rec.get_property("Event").datatype == db.LIST("Event")
+    assert new_dataset_rec.get_property("Event").value is not None
+    assert len(new_dataset_rec.get_property("Event").value) == 1
+    assert new_dataset_rec.get_property("Event").value[0] != old_event_rec.id
+
+    # The event has new properties
+    new_event_rec = db.Record(
+        id=new_dataset_rec.get_property("Event").value[0]).retrieve()
+    assert new_event_rec.get_property("longitude").value == 0.0
+    assert new_event_rec.get_property("latitude").value == 0.0
+    assert new_event_rec.get_property("location").value == "Origin"
+    assert new_event_rec.get_property(
+        "start_datetime").value == old_event_rec.get_property("start_datetime").value
diff --git a/integrationtests/test_use_case_simple_presentation.py b/integrationtests/test_use_case_simple_presentation.py
new file mode 100644
index 0000000000000000000000000000000000000000..60f771cfacb6a055d8539c185e17eb75118117fa
--- /dev/null
+++ b/integrationtests/test_use_case_simple_presentation.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# ** header v3.0
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2022 Alexander Schlemmer
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ** end header
+#
+
+"""
+Integration test for the "use case simple presentation" example: loads the data model, inserts the test files and runs the crawler, then checks the created records.
+"""
+import os
+import pytest
+from subprocess import run
+
+import caosdb as db
+from caosadvancedtools.loadFiles import loadpath
+from caosadvancedtools.models import parser as parser
+from caoscrawler.crawl import crawler_main
+
+# TODO: wait for release of this feature in pylib
+# from caosdb.utils.register_tests import clear_database, set_test_key
+# set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
+
+DATADIR = os.path.join(os.path.dirname(__file__), "test_data",
+                       "extroot", "use_case_simple_presentation")
+
+# TODO: remove this
+@pytest.fixture
+def clear_database():
+    # TODO(fspreck): Remove once the corresponding advancedtools function can be
+    # used.
+    ents = db.execute_query("FIND ENTITY WITH ID>99")
+    if ents:
+        ents.delete()
+
+
+def test_complete_crawler(
+        clear_database
+):
+    # Setup the data model:
+    model = parser.parse_model_from_yaml(os.path.join(DATADIR, "model.yml"))
+    model.sync_data_model(noquestion=True, verbose=False)
+
+    # Insert the data:
+    for path in [
+            "/opt/caosdb/mnt/extroot/use_case_simple_presentation/ExperimentalData",
+            "/opt/caosdb/mnt/extroot/use_case_simple_presentation/DataAnalysis"]:
+        loadpath(
+            path=path,
+            include=None,
+            exclude=None,
+            prefix="/",
+            dryrun=False,
+            forceAllowSymlinks=False)
+
+    crawler_main(DATADIR,
+                 os.path.join(DATADIR, "cfood.yml"),
+                 os.path.join(DATADIR, "identifiables.yml"),
+                 True,
+                 os.path.join(DATADIR, "provenance.yml"),
+                 False,
+                 "/use_case_simple_presentation")
+
+    res = db.execute_query("FIND Record Experiment")
+    assert len(res) == 1
+    assert res[0].get_property("identifier").value == "crawlertest"
+    assert res[0].get_property("date").value == "2022-03-16"
+
+    lf = db.File(id=res[0].get_property("mdfile").value).retrieve()
+    assert lf.path == "/ExperimentalData/data.md"
+
+    assert res[0].get_property("alpha").value == 16.0
+    assert res[0].get_property("alpha").unit == "km"
+
+    res_da = db.execute_query("FIND Record DataAnalysis")
+    assert len(res_da) == 1
+    assert res_da[0].get_property("sources").value[0] == res[0].id
+
+    lf = db.File(id=res_da[0].get_property("mdfile").value).retrieve()
+    assert lf.path == "/DataAnalysis/results.md"
diff --git a/setup.cfg b/setup.cfg
index 2f8d46b30ee04d68adc6aef69e1a04115bbc44d8..0351d56dec59ee0b33c10be1f825e5d1d04f8504 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -22,8 +22,10 @@ install_requires =
 	importlib-resources
 	caosdb
 	caosadvancedtools
-    yaml-header-tools
+    yaml-header-tools>=0.2.1
     pyyaml
+    odfpy #make optional
+    pandas
 
 [options.packages.find]
 where = src
diff --git a/src/caoscrawler/__init__.py b/src/caoscrawler/__init__.py
index 28ef97d421023ad41be65d9d0e6abac76fbef6fe..b65b9fd9d24b9519a52ca13d07e46c9d8f791a73 100644
--- a/src/caoscrawler/__init__.py
+++ b/src/caoscrawler/__init__.py
@@ -1 +1 @@
-from .crawl import Crawler
+from .crawl import Crawler, SecurityMode
diff --git a/src/caoscrawler/authorize.py b/src/caoscrawler/authorize.py
new file mode 100644
index 0000000000000000000000000000000000000000..6f1011b227881d4b73186996076abe20d94d52e5
--- /dev/null
+++ b/src/caoscrawler/authorize.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2022 Henrik tom Wörden
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+
+from caosadvancedtools.crawler import Crawler as OldCrawler
+
+import argparse
+
+
+def parse_args():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("run_id",
+                        help="Run ID of the crawler run that created the changes that shall be "
+                        "authorized.")
+
+    return parser.parse_args()
+
+
+if __name__ == "__main__":
+    args = parse_args()
+    OldCrawler.update_authorized_changes(args.run_id)
diff --git a/src/caoscrawler/cfood-schema.yml b/src/caoscrawler/cfood-schema.yml
index 6505cde7601d89dea84fa80d1ab7c36b2eca6895..d7b5abfd1ac6c381b50bd4ce61015f1b8602b408 100644
--- a/src/caoscrawler/cfood-schema.yml
+++ b/src/caoscrawler/cfood-schema.yml
@@ -23,6 +23,8 @@ cfood:
           - Definitions
           - Dict
           - JSONFile
+          - CSVTableConverter
+          - XLSXTableConverter
           description: Type of this converter node.
         match:
           description: typically a regexp which is matched to a structure element name
diff --git a/src/caoscrawler/converters.py b/src/caoscrawler/converters.py
index b06cf1353eb8021f4e2fd44e1ff19fed83b9a122..4be30e1a31abacc4905b20b9c62a61978c6d8847 100644
--- a/src/caoscrawler/converters.py
+++ b/src/caoscrawler/converters.py
@@ -35,13 +35,14 @@ from .structure_elements import (StructureElement, Directory, File, Dict, JSONFi
                                  DictIntegerElement, DictBooleanElement,
                                  DictFloatElement, DictDictElement,
                                  TextElement, DictTextElement, DictElement, DictListElement)
-from typing import Optional, Union
-from abc import abstractmethod
+from typing import Dict as Dict_t, List, Optional, Tuple, Union
+from abc import ABCMeta, abstractmethod
 from string import Template
 import yaml_header_tools
 
 from caosdb.high_level_api import (CaosDBPythonEntity,
                                    create_entity_container)
+import pandas as pd
 import yaml
 
 # These are special properties which are (currently) treated differently
@@ -50,6 +51,32 @@ SPECIAL_PROPERTIES = ("description", "name", "id", "path",
                       "file", "checksum", "size")
 
 
+def _only_max(children_with_keys):
+
+    return [max(children_with_keys, key=lambda x: x[1])[0]]
+
+
+def _only_min(children_with_keys):
+
+    return [min(children_with_keys, key=lambda x: x[1])[0]]
+
+
+# names of functions that can be used to filter children
+FILTER_FUNCTIONS = {
+    "only_max": _only_max,
+    "only_min": _only_min,
+}
+
+
+def str_to_bool(x):
+    if str(x).lower() == "true":
+        return True
+    elif str(x).lower() == "false":
+        return False
+    else:
+        raise RuntimeError("Should be 'true' or 'false'.")
+
+
 class ConverterValidationError(Exception):
     """To be raised if contents of an element to be converted are invalid."""
 
@@ -57,13 +84,49 @@ class ConverterValidationError(Exception):
         self.message = msg
 
 
-def handle_value(value: Union[dict, str], values: GeneralStore):
+def replace_variables(propvalue, values: GeneralStore):
     """
-    Function to generically handle values for properties defined in the
-    yaml structure.
+    This function replaces variables in property values (and possibly other locations,
+    where the crawler can replace cfood-internal variables).
+
+    This function checks whether the value that is to be replaced is of type db.Entity.
+    In this case the entity is returned (note that this is of course only possible, if the
+    occurrence of the variable is directly at the beginning of the value and e.g. no string
+    concatenation is attempted).
+
+    In any other case the variable substitution is carried out and a new string with the
+    replaced variables is returned.
+    """
+    # Check if the replacement is a single variable containing a record:
+    match = re.match(r"^\$(\{)?(?P<varname>[0-9a-zA-Z_]+)(\})?$", propvalue)
+    if match is not None:
+        varname = match.group("varname")
+        if varname in values:
+            if values[varname] is None:
+                return None
+            if isinstance(values[varname], db.Entity):
+                return values[varname]
+
+    propvalue_template = Template(propvalue)
+    return propvalue_template.safe_substitute(**values.get_storage())
 
+
+def handle_value(value: Union[dict, str, list], values: GeneralStore):
+    """
+    determines whether the given value needs to set a property, be added to an existing value (create a list) or
+    be added as an additional property (multiproperty).
+
+    Variable names (starting with a "$") are replaced by the corresponding value stored in the
+    `values` GeneralStore.
+
+    Parameters:
+    - value: if str, the value to be interpreted. E.g. "4", "hallo" or "$a" etc.
+             if dict, must have keys "value" and "collection_mode". The returned tuple is directly
+             created from the corresponding values.
+             if list, each element is checked for replacement and the resulting list will be used
+             as (list) value for the property
     Returns a tuple:
-    - the final value of the property
+    - the final value of the property; variable names contained in `values` are replaced.
     - the collection mode (can be single, list or multiproperty)
     """
     # @review Florian Spreckelsen 2022-05-13
@@ -92,22 +155,25 @@ def handle_value(value: Union[dict, str], values: GeneralStore):
         #       different from the two cases above.
         collection_mode = "single"
         propvalue = value
-        return (propvalue, collection_mode)
 
-    # Check if the replacement is a single variable containing a record:
-    match = re.match(r"^\$(\{)?(?P<varname>[0-9a-zA-Z_]+)(\})?$", propvalue)
-    if match is not None:
-        varname = match.group("varname")
-        if varname in values:
-            if values[varname] is None:
-                propvalue = None
-                return (propvalue, collection_mode)
-            if isinstance(values[varname], db.Entity):
-                propvalue = values[varname]
-                return (propvalue, collection_mode)
+        # variables replacement:
+        propvalue = list()
+        for element in value:
+            # Do the element-wise replacement only, when its type is string:
+            if type(element) == str:
+                propvalue.append(replace_variables(element, values))
+            else:
+                propvalue.append(element)
 
-    propvalue_template = Template(propvalue)
-    propvalue = propvalue_template.safe_substitute(**values.get_storage())
+        return (propvalue, collection_mode)
+    else:
+        # value is another simple type
+        collection_mode = "single"
+        propvalue = value
+        # Return it immediately, otherwise variable substitution would be done and fail:
+        return (propvalue, collection_mode)
+
+    propvalue = replace_variables(propvalue, values)
     return (propvalue, collection_mode)
 
 
@@ -115,7 +181,7 @@ def create_records(values: GeneralStore,
                    records: RecordStore,
                    def_records: dict):
     # list of keys to identify, which variables have been set by which paths:
-    # these are tuples:
+    # the items are tuples:
     # 0: record name
     # 1: property name
     keys_modified = []
@@ -145,6 +211,11 @@ def create_records(values: GeneralStore,
         for key, value in record.items():
             if key == "parents" or key == "role":
                 continue
+
+            # Allow replacing variables in keys / names of properties:
+            key_template = Template(key)
+            key = key_template.safe_substitute(**values.get_storage())
+
             keys_modified.append((name, key))
             propvalue, collection_mode = handle_value(value, values)
 
@@ -153,6 +224,9 @@ def create_records(values: GeneralStore,
                 # list mode does not work for them
                 if key == "path" and not propvalue.startswith(os.path.sep):
                     propvalue = os.path.sep + propvalue
+
+                    # Convert relative to absolute paths:
+                    propvalue = os.path.normpath(propvalue)
                 setattr(c_record, key, propvalue)
             else:
 
@@ -175,8 +249,10 @@ def create_records(values: GeneralStore,
         # parents will be added when they aren't present in the record yet:
         if "parents" in record:
             for parent in record["parents"]:
-                if not has_parent(c_record, parent):
-                    c_record.add_parent(parent)
+                # Do the variables replacement:
+                var_replaced_parent = replace_variables(parent, values)
+                if not has_parent(c_record, var_replaced_parent):
+                    c_record.add_parent(var_replaced_parent)
         else:
             # add the "fallback" parent only for Records, not for Files:
             if role == "Record":
@@ -186,7 +262,7 @@ def create_records(values: GeneralStore,
     return keys_modified
 
 
-class Converter(object):
+class Converter(object, metaclass=ABCMeta):
     """
     Converters treat StructureElements contained in the hierarchical sturcture.
     """
@@ -198,7 +274,7 @@ class Converter(object):
         self.name = name
 
         # Used to store usage information for debugging:
-        self.metadata: dict[str, set[str]] = {
+        self.metadata: Dict_t[str, set[str]] = {
             "usage": set()
         }
 
@@ -214,6 +290,10 @@ class Converter(object):
     def converter_factory(definition: dict,
                           name: str,
                           converter_registry: dict):
+        """creates a Converter instance of the appropriate class.
+
+        The `type` key in the `definition` defines the Converter class which is being used.
+        """
 
         if "type" not in definition:
             raise RuntimeError(
@@ -263,6 +343,31 @@ class Converter(object):
                               records,
                               self.definition["records"])
 
+    def filter_children(self, children_with_strings:
+                        List[Tuple[StructureElement, str]], expr: str,
+                        group: str, rule: str):
+        """Filter children according to regexp `expr` and `rule`."""
+
+        if rule not in FILTER_FUNCTIONS:
+            raise RuntimeError(
+                f"{rule} is not a known filter rule. Only {list(FILTER_FUNCTIONS.keys())} are implemented."
+            )
+
+        to_be_filtered = []
+        unmatched_children = []
+
+        for (child, name) in children_with_strings:
+
+            m = re.match(expr, name)
+            if m is None:
+                unmatched_children.append(child)
+            else:
+                to_be_filtered.append((child, m.groupdict()[group]))
+
+        filtered_children = FILTER_FUNCTIONS[rule](to_be_filtered)
+
+        return filtered_children+unmatched_children
+
     @abstractmethod
     def typecheck(self, element: StructureElement):
         pass
@@ -287,7 +392,15 @@ class DirectoryConverter(Converter):
             raise RuntimeError(
                 "Directory converters can only create children from directories.")
 
-        return self.create_children_from_directory(element)
+        children = self.create_children_from_directory(element)
+
+        if "filter" in self.definition:
+
+            tuple_list = [(c, c.name) for c in children]
+
+            return self.filter_children(tuple_list, **self.definition["filter"])
+
+        return children
 
     def typecheck(self, element: StructureElement):
         return isinstance(element, Directory)
@@ -308,7 +421,7 @@ class DirectoryConverter(Converter):
 
         element: A directory (of type Directory) which will be traversed.
         """
-        children: list[StructureElement] = []
+        children: List[StructureElement] = []
 
         for name in sorted(os.listdir(element.path)):
             path = os.path.join(element.path, name)
@@ -408,7 +521,7 @@ class MarkdownFileConverter(Converter):
 
         header = yaml_header_tools.get_header_from_file(
             element.path, clean=False)
-        children: list[StructureElement] = []
+        children: List[StructureElement] = []
 
         for name, entry in header.items():
             if type(entry) == list:
@@ -462,6 +575,8 @@ class DictConverter(Converter):
                 children.append(DictBooleanElement(name, value))
             elif type(value) == float:
                 children.append(DictFloatElement(name, value))
+            elif type(value) == type(None):
+                continue
             else:
                 children.append(DictElement(name, value))
                 warnings.warn(f"The value in the dict for key:{name} has an unknown type. "
@@ -482,6 +597,7 @@ class DictConverter(Converter):
         return {}
 
 
+# TODO: difference to SimpleFileConverter? Do we need both?
 class FileConverter(Converter):
     def typecheck(self, element: StructureElement):
         return isinstance(element, File)
@@ -513,6 +629,8 @@ class JSONFileConverter(DictConverter):
     def create_children(self, generalStore: GeneralStore, element: StructureElement):
         if not self.typecheck(element):
             raise RuntimeError("A JSON file is needed to create children")
+        # TODO: either add explicit type check for File structure element here,
+        #       or add a comment to suppress mypy type warning.
         with open(element.path, 'r') as json_file:
             json_data = json.load(json_file)
         if not isinstance(json_data, dict):
@@ -660,3 +778,103 @@ class TextElementConverter(Converter):
         if m is None:
             return None
         return m.groupdict()
+
+
+class TableConverter(Converter):
+    """
+    This converter reads tables in different formats line by line and
+    allows matching the corresponding rows.
+
+    The subtree generated by the table converter consists of DictDictElements, each being
+    a row. The corresponding header elements will become the dictionary keys.
+
+    The rows can be matched using a DictDictElementConverter.
+    """
+    @abstractmethod
+    def get_options(self):
+        """
+        This method needs to be overwritten by the specific table converter to provide
+        information about the possible options.
+        """
+        pass
+
+    def _get_options(self, possible_options):
+        option_dict = dict()
+        for opt_name, opt_conversion in possible_options:
+            if opt_name in self.definition:
+                el = self.definition[opt_name]
+                # The option can often either be a single value or a list of values.
+                # In the latter case each element of the list will be converted to the defined type.
+                if isinstance(el, list):
+                    option_dict[opt_name] = [
+                        opt_conversion(el_el) for el_el in el]
+                else:
+                    option_dict[opt_name] = opt_conversion(el)
+        return option_dict
+
+    def typecheck(self, element: StructureElement):
+        return isinstance(element, File)
+
+    def match(self, element: StructureElement):
+        if not isinstance(element, File):
+            raise RuntimeError("Element must be a File.")
+        m = re.match(self.definition["match"], element.name)
+        if m is None:
+            return None
+        return m.groupdict()
+
+
+class XLSXTableConverter(TableConverter):
+    def get_options(self):
+        return self._get_options([
+            ("sheet_name", str),
+            ("header", int),
+            ("names", str),
+            ("index_col", int),
+            ("usecols", int),
+            ("true_values", str),
+            ("false_values", str),
+            ("na_values", str),
+            ("skiprows", int),
+            ("nrows", int),
+            ("keep_default_na", str_to_bool), ]
+        )
+
+    def create_children(self, generalStore: GeneralStore,
+                        element: StructureElement):
+        if not isinstance(element, File):
+            raise RuntimeError("Element must be a File.")
+        table = pd.read_excel(element.path, **self.get_options())
+        child_elements = list()
+        for index, row in table.iterrows():
+            child_elements.append(
+                DictDictElement(str(index), row.to_dict()))
+        return child_elements
+
+
+class CSVTableConverter(TableConverter):
+    def get_options(self):
+        return self._get_options([
+            ("sep", str),
+            ("delimiter", str),
+            ("header", int),
+            ("names", str),
+            ("index_col", int),
+            ("usecols", int),
+            ("true_values", str),
+            ("false_values", str),
+            ("na_values", str),
+            ("skiprows", int),
+            ("nrows", int),
+            ("keep_default_na", str_to_bool), ])
+
+    def create_children(self, generalStore: GeneralStore,
+                        element: StructureElement):
+        if not isinstance(element, File):
+            raise RuntimeError("Element must be a File.")
+        table = pd.read_csv(element.path, **self.get_options())
+        child_elements = list()
+        for index, row in table.iterrows():
+            child_elements.append(
+                DictDictElement(str(index), row.to_dict()))
+        return child_elements
diff --git a/src/caoscrawler/crawl.py b/src/caoscrawler/crawl.py
index 6586384f1a0bca9ce71b2b131b7ea3a4fe8cac3d..77b737defedbf24371026e6ff0957fab03f1ed4a 100644
--- a/src/caoscrawler/crawl.py
+++ b/src/caoscrawler/crawl.py
@@ -28,13 +28,19 @@ Crawl a file structure using a yaml cfood definition and synchronize
 the acuired data with CaosDB.
 """
 
+import importlib
+from caosadvancedtools.cache import UpdateCache, Cache
+import uuid
 import sys
 import os
 import yaml
+from enum import Enum
+import logging
 from importlib_resources import files
 import argparse
 from argparse import RawTextHelpFormatter
 import caosdb as db
+from caosadvancedtools.crawler import Crawler as OldCrawler
 from caosdb.common.datatype import is_reference
 from .stores import GeneralStore, RecordStore
 from .identified_cache import IdentifiedCache
@@ -44,18 +50,24 @@ from .identifiable_adapters import (IdentifiableAdapter,
                                     LocalStorageIdentifiableAdapter,
                                     CaosDBIdentifiableAdapter)
 from collections import defaultdict
-from typing import Union, Any, Optional, Type
+from typing import Any, Dict, List, Optional, Type, Union
 from caosdb.apiutils import compare_entities, merge_entities
 from copy import deepcopy
 from jsonschema import validate
 
 from caosdb.high_level_api import convert_to_python_object
+from .macros import defmacro_constructor, macro_constructor
 
-import importlib
+logger = logging.getLogger(__name__)
 
 SPECIAL_PROPERTIES_STRICT = ("description", "name", "id", "path")
 SPECIAL_PROPERTIES_NOT_STRICT = ("file", "checksum", "size")
 
+# Register the macro functions from the submodule:
+yaml.SafeLoader.add_constructor("!defmacro", defmacro_constructor)
+yaml.SafeLoader.add_constructor("!macro", macro_constructor)
+
+
 def check_identical(record1: db.Entity, record2: db.Entity, ignore_id=False):
     """
     This function uses compare_entities to check whether to entities are identical
@@ -102,7 +114,7 @@ def check_identical(record1: db.Entity, record2: db.Entity, ignore_id=False):
             return False
         for attribute in ("datatype", "importance", "unit"):
             # only make an update for those attributes if there is a value difference and
-            # the value in the updateList is not None
+            # the value in the target_data is not None
             if attribute in comp[0]["properties"][key]:
                 attr_val = comp[0]["properties"][key][attribute]
                 other_attr_val = (comp[1]["properties"][key][attribute]
@@ -123,6 +135,7 @@ def check_identical(record1: db.Entity, record2: db.Entity, ignore_id=False):
 
 
 def _resolve_datatype(prop: db.Property, remote_entity: db.Entity):
+    """ sets the datatype on the given property (side effect) """
 
     if remote_entity.role == "Property":
         datatype = remote_entity.datatype
@@ -139,6 +152,12 @@ def _resolve_datatype(prop: db.Property, remote_entity: db.Entity):
     return prop
 
 
+class SecurityMode(Enum):
+    RETRIEVE = 0
+    INSERT = 1
+    UPDATE = 2
+
+
 class Crawler(object):
     """
     Crawler class that encapsulates crawling functions.
@@ -146,30 +165,40 @@ class Crawler(object):
     storage for values (general store).
     """
 
-    def __init__(self, converters: list[Converter] = [],
+    def __init__(self,
                  generalStore: Optional[GeneralStore] = None,
                  debug: bool = False,
-                 identifiableAdapter: IdentifiableAdapter = None):
+                 identifiableAdapter: IdentifiableAdapter = None,
+                 securityMode: int = SecurityMode.UPDATE
+                 ):
         """
         Create a new crawler and initialize an empty RecordStore and GeneralStore.
 
-        converters: The set of converters used for this crawler.
-        recordStore: An initial GeneralStore which might store e.g. environment variables.
-
-        debug: Create a debugging information tree when set to True.
-               The debugging information tree is a variable stored in
-               self.debug_tree. It is a dictionary mapping directory entries
-               to a tuple of general stores and record stores which are valid for the directory scope.
-               Furthermore, it is stored in a second tree named self.debug_copied whether the
-               objects in debug_tree had been copied from a higher level in the hierarchy
-               of the structureelements.
+        Parameters
+        ----------
+        recordStore : GeneralStore
+             An initial GeneralStore which might store e.g. environment variables.
+        debug : bool
+             Create a debugging information tree when set to True.
+             The debugging information tree is a variable stored in
+             self.debug_tree. It is a dictionary mapping directory entries
+             to a tuple of general stores and record stores which are valid for
+             the directory scope.
+             Furthermore, it is stored in a second tree named self.debug_copied whether the
+             objects in debug_tree had been copied from a higher level in the hierarchy
+             of the structureelements.
+        identifiableAdapter : IdentifiableAdapter
+             TODO describe
+        securityMode : int
+             Whether only retrieves are allowed or also inserts or even updates.
+             Please use SecurityMode Enum
         """
 
         # TODO: check if this feature is really needed
-        self.global_converters = converters
 
         self.identified_cache = IdentifiedCache()
         self.recordStore = RecordStore()
+        self.securityMode = securityMode
 
         self.generalStore = generalStore
         if generalStore is None:
@@ -178,14 +207,15 @@ class Crawler(object):
         self.identifiableAdapter = identifiableAdapter
         if identifiableAdapter is None:
             self.identifiableAdapter = LocalStorageIdentifiableAdapter()
-
+        # If a directory is crawled this may hold the path to that directory
+        self.crawled_directory = None
         self.debug = debug
         if self.debug:
             # order in the tuple:
             # 0: generalStore
             # 1: recordStore
-            self.debug_tree: dict[str, tuple] = dict()
-            self.debug_metadata: dict[str, dict] = dict()
+            self.debug_tree: Dict[str, tuple] = dict()
+            self.debug_metadata: Dict[str, dict] = dict()
             self.debug_metadata["copied"] = dict()
             self.debug_metadata["provenance"] = defaultdict(lambda: dict())
             self.debug_metadata["usage"] = defaultdict(lambda: set())
@@ -198,7 +228,29 @@ class Crawler(object):
 
         # Load the cfood from a yaml file:
         with open(crawler_definition_path, "r") as f:
-            crawler_definition = yaml.safe_load(f)
+            crawler_definitions = list(yaml.safe_load_all(f))
+
+        crawler_definition = self._load_definition_from_yaml_dict(
+            crawler_definitions)
+
+        return self._resolve_validator_paths(crawler_definition, crawler_definition_path)
+
+    def _load_definition_from_yaml_dict(self, crawler_definitions: List[Dict]):
+        """Load crawler definitions from a list of (yaml) dicts `crawler_definitions` which
+        contains either one or two documents.
+
+        Doesn't resolve the validator paths in the cfood definition, so for
+        internal and testing use only.
+
+        """
+        if len(crawler_definitions) == 1:
+            # Simple case, just one document:
+            crawler_definition = crawler_definitions[0]
+        elif len(crawler_definitions) == 2:
+            crawler_definition = crawler_definitions[1]
+        else:
+            raise RuntimeError(
+                "Crawler definition must not contain more than two documents.")
 
         # TODO: at this point this function can already load the cfood schema extensions
         #       from the crawler definition and add them to the yaml schema that will be
@@ -213,11 +265,16 @@ class Crawler(object):
             for key in crawler_definition["Converters"]:
                 schema["cfood"]["$defs"]["converter"]["properties"]["type"]["enum"].append(
                     key)
+        if len(crawler_definitions) == 2:
+            if "Converters" in crawler_definitions[0]["metadata"]:
+                for key in crawler_definitions[0]["metadata"]["Converters"]:
+                    schema["cfood"]["$defs"]["converter"]["properties"]["type"]["enum"].append(
+                        key)
 
         # Validate the cfood schema:
         validate(instance=crawler_definition, schema=schema["cfood"])
 
-        return self._resolve_validator_paths(crawler_definition, crawler_definition_path)
+        return crawler_definition
 
     def _resolve_validator_paths(self, definition: dict, definition_path: str):
         """Resolve path to validation files with respect to the file in which
@@ -257,7 +314,7 @@ class Crawler(object):
         """
 
         # Defaults for the converter registry:
-        converter_registry: dict[str, dict[str, str]] = {
+        converter_registry: Dict[str, Dict[str, str]] = {
             "Directory": {
                 "converter": "DirectoryConverter",
                 "package": "caoscrawler.converters"},
@@ -276,6 +333,12 @@ class Crawler(object):
             "JSONFile": {
                 "converter": "JSONFileConverter",
                 "package": "caoscrawler.converters"},
+            "CSVTableConverter": {
+                "converter": "CSVTableConverter",
+                "package": "caoscrawler.converters"},
+            "XLSXTableConverter": {
+                "converter": "XLSXTableConverter",
+                "package": "caoscrawler.converters"},
             "Dict": {
                 "converter": "DictConverter",
                 "package": "caoscrawler.converters"},
@@ -331,6 +394,7 @@ class Crawler(object):
             raise ValueError(
                 "You have to provide a non-empty path for crawling.")
         dir_structure_name = os.path.basename(dirname)
+        self.crawled_directory = dirname
         if not dir_structure_name and dirname.endswith('/'):
             if dirname == '/':
                 # Crawling the entire file system
@@ -345,9 +409,13 @@ class Crawler(object):
                             converter_registry)
 
     @staticmethod
-    def create_local_converters(crawler_definition: dict,
-                                converter_registry: dict):
-        local_converters = []
+    def initialize_converters(crawler_definition: dict, converter_registry: dict):
+        """
+        takes the cfood as dict (`crawler_definition`) and creates the converter objects that
+        are defined on the highest level. Child Converters will in turn be created during the
+        initialization of the Converters.
+        """
+        converters = []
 
         for key, value in crawler_definition.items():
             # Definitions and Converters are reserved keywords
@@ -359,23 +427,30 @@ class Crawler(object):
                 continue
             elif key == "Converters":
                 continue
-            local_converters.append(Converter.converter_factory(
+            converters.append(Converter.converter_factory(
                 value, key, converter_registry))
 
-        return local_converters
+        return converters
 
-    def start_crawling(self, items: Union[list[StructureElement], StructureElement],
+    def start_crawling(self, items: Union[List[StructureElement], StructureElement],
                        crawler_definition: dict,
                        converter_registry: dict):
         """
         Start point of the crawler recursion.
 
-        items: A list of structure elements (or a single StructureElemen) that is used for
-               generating the initial items for the crawler. This could e.g. be a Directory.
-        crawler_definition: A dictionary representing the crawler definition, possibly from a yaml
-              file.
+        Parameters
+        ----------
+        items: list
+             A list of structure elements (or a single StructureElement) that is used for
+             generating the initial items for the crawler. This could e.g. be a Directory.
+        crawler_definition : dict
+             A dictionary representing the crawler definition, possibly from a yaml
+             file.
 
-        Return the final update list.
+        Returns
+        -------
+        target_data : list
+            the final list with the target state of Records.
         """
 
         # This function builds the tree of converters out of the crawler definition.
@@ -386,20 +461,20 @@ class Crawler(object):
         if not isinstance(items, list):
             items = [items]
 
-        local_converters = Crawler.create_local_converters(crawler_definition,
-                                                           converter_registry)
+        self.run_id = uuid.uuid1()
+        local_converters = Crawler.initialize_converters(
+            crawler_definition, converter_registry)
         # This recursive crawling procedure generates the update list:
-        self.updateList: list[db.Record] = []
-        self._crawl(items,
-                    self.global_converters, local_converters, self.generalStore, self.recordStore,
-                    [], [])
+        self.target_data: List[db.Record] = []
+        self._crawl(items, local_converters, self.generalStore,
+                    self.recordStore, [], [])
 
         if self.debug:
-            self.debug_converters = self.global_converters + local_converters
+            self.debug_converters = local_converters
 
-        return self.updateList
+        return self.target_data
 
-    def synchronize(self, commit_changes: bool = True):
+    def synchronize(self, commit_changes: bool = True, unique_names=True):
         """
         Carry out the actual synchronization.
         """
@@ -407,7 +482,7 @@ class Crawler(object):
         # After the crawling, the actual synchronization with the database, based on the
         # update list is carried out:
 
-        return self._synchronize(self.updateList, commit_changes)
+        return self._synchronize(self.target_data, commit_changes, unique_names=unique_names)
 
     def can_be_checked_externally(self, record: db.Record):
         """
@@ -434,7 +509,7 @@ class Crawler(object):
                 return False
         return True
 
-    def create_flat_list(self, ent_list: list[db.Entity], flat: list[db.Entity]):
+    def create_flat_list(self, ent_list: List[db.Entity], flat: List[db.Entity]):
         """
         Recursively adds all properties contained in entities from ent_list to
         the output list flat. Each element will only be added once to the list.
@@ -567,11 +642,11 @@ class Crawler(object):
 
         merge_entities(to, fro)
 
-    def split_into_inserts_and_updates(self, ent_list: list[db.Entity]):
+    def split_into_inserts_and_updates(self, ent_list: List[db.Entity]):
         if self.identifiableAdapter is None:
             raise RuntimeError("Should not happen.")
-        to_be_inserted: list[db.Entity] = []
-        to_be_updated: list[db.Entity] = []
+        to_be_inserted: List[db.Entity] = []
+        to_be_updated: List[db.Entity] = []
         flat = list(ent_list)
         # assure all entities are direct members TODO Can this be removed at some point?Check only?
         self.create_flat_list(ent_list, flat)
@@ -687,19 +762,20 @@ class Crawler(object):
 
         return to_be_inserted, to_be_updated
 
-    # TODO: replace _by_ with _with_
-    def replace_entities_by_ids(self, rec: db.Record):
+    def replace_entities_with_ids(self, rec: db.Record):
         for el in rec.properties:
             if isinstance(el.value, db.Entity):
-                el.value = el.value.id
+                if el.value.id is not None:
+                    el.value = el.value.id
             elif isinstance(el.value, list):
                 for index, val in enumerate(el.value):
                     if isinstance(val, db.Entity):
-                        el.value[index] = val.id
+                        if val.id is not None:
+                            el.value[index] = val.id
 
     @staticmethod
-    def remove_unnecessary_updates(updateList: list[db.Record],
-                                   identified_records: list[db.Record]):
+    def remove_unnecessary_updates(target_data: List[db.Record],
+                                   identified_records: List[db.Record]):
         """
         checks whether all relevant attributes (especially Property values) are equal
 
@@ -708,52 +784,116 @@ class Crawler(object):
         update list without unecessary updates
 
         """
-        if len(updateList) != len(identified_records):
+        if len(target_data) != len(identified_records):
             raise RuntimeError("The lists of updates and of identified records need to be of the "
                                "same length!")
         # TODO this can now easily be changed to a function without side effect
-        for i in reversed(range(len(updateList))):
-            identical = check_identical(updateList[i], identified_records[i])
+        for i in reversed(range(len(target_data))):
+            identical = check_identical(target_data[i], identified_records[i])
 
             if identical:
-                del updateList[i]
+                del target_data[i]
                 continue
             else:
                 pass
 
     @staticmethod
-    def execute_inserts_in_list(to_be_inserted):
+    def execute_parent_updates_in_list(to_be_updated, securityMode, run_id, unique_names):
+        """
+        Execute the updates of changed parents.
+
+        This method is used before the standard inserts and needed
+        because some changes in parents (e.g. of Files) might fail
+        if they are not updated first.
+        """
+        Crawler.set_ids_and_datatype_of_parents_and_properties(to_be_updated)
+        parent_updates = db.Container()
+
+        for record in to_be_updated:
+            old_entity = Crawler._get_entity_by_id(record.id)
+
+            # Check whether the parents have been changed and add them if missing
+            # in the old entity:
+            changes_made = False
+            for parent in record.parents:
+                found = False
+                for old_parent in old_entity.parents:
+                    if old_parent.id == parent.id:
+                        found = True
+                        break
+                if not found:
+                    old_entity.add_parent(id=parent.id)
+                    changes_made = True
+            if changes_made:
+                parent_updates.append(old_entity)
+        logger.debug("RecordTypes need to be added to the following entities:")
+        logger.debug(parent_updates)
+        if len(parent_updates) > 0:
+            if securityMode.value > SecurityMode.INSERT.value:
+                parent_updates.update(unique=False)
+            elif run_id is not None:
+                update_cache = UpdateCache()
+                update_cache.insert(parent_updates, run_id)
+                logger.info("Some entities need to be updated because they are missing a parent "
+                            "RecordType. The update was NOT executed due to the chosen security "
+                            "mode. This might lead to a failure of inserts that follow.")
+                logger.info(parent_updates)
+
+    @staticmethod
+    def _get_entity_by_name(name):
+        return db.Entity(name=name).retrieve()
+
+    @staticmethod
+    def _get_entity_by_id(id):
+        return db.Entity(id=id).retrieve()
+
+    @staticmethod
+    def execute_inserts_in_list(to_be_inserted, securityMode, run_id: int = None,
+                                unique_names=True):
         for record in to_be_inserted:
             for prop in record.properties:
-                entity = db.Entity(name=prop.name).retrieve()
-                prop = _resolve_datatype(prop, entity)
-        print("INSERT")
-        print(to_be_inserted)
+                entity = Crawler._get_entity_by_name(prop.name)
+                _resolve_datatype(prop, entity)
+        logger.debug("INSERT")
+        logger.debug(to_be_inserted)
         if len(to_be_inserted) > 0:
-            db.Container().extend(to_be_inserted).insert()
+            if securityMode.value > SecurityMode.RETRIEVE.value:
+                db.Container().extend(to_be_inserted).insert(unique=unique_names)
+            elif run_id is not None:
+                update_cache = UpdateCache()
+                update_cache.insert(to_be_inserted, run_id, insert=True)
 
     @staticmethod
-    def execute_updates_in_list(to_be_updated):
-        # retrieve ids of properties when missing:
-        for record in to_be_updated:
+    def set_ids_and_datatype_of_parents_and_properties(rec_list):
+        for record in rec_list:
             for parent in record.parents:
                 if parent.id is None:
-                    parent.id = db.Entity(name=parent.name).retrieve().id
+                    parent.id = Crawler._get_entity_by_name(parent.name).id
             for prop in record.properties:
                 if prop.id is None:
-                    entity = db.Entity(name=prop.name).retrieve()
+                    entity = Crawler._get_entity_by_name(prop.name)
                     prop.id = entity.id
-                    prop = _resolve_datatype(prop, entity)
-        print("UPDATE")
-        print(to_be_updated)
-        if len(to_be_updated) > 0:
-            db.Container().extend(to_be_updated).update()
+                    _resolve_datatype(prop, entity)
 
-    def _synchronize(self, updateList: list[db.Record], commit_changes: bool = True):
+    @staticmethod
+    def execute_updates_in_list(to_be_updated, securityMode, run_id: int = None,
+                                unique_names=True):
+        Crawler.set_ids_and_datatype_of_parents_and_properties(to_be_updated)
+        logger.debug("UPDATE")
+        logger.debug(to_be_updated)
+        if len(to_be_updated) > 0:
+            if securityMode.value > SecurityMode.INSERT.value:
+                db.Container().extend(to_be_updated).update(unique=unique_names)
+            elif run_id is not None:
+                update_cache = UpdateCache()
+                update_cache.insert(to_be_updated, run_id)
+
+    def _synchronize(self, target_data: List[db.Record], commit_changes: bool = True,
+                     unique_names=True):
         """
         This function applies several stages:
-        1) Retrieve identifiables for all records in updateList.
-        2) Compare updateList with existing records.
+        1) Retrieve identifiables for all records in target_data.
+        2) Compare target_data with existing records.
         3) Insert and update records based on the set of identified differences.
 
         This function makes use of an IdentifiableAdapter which is used to retrieve
@@ -762,33 +902,74 @@ class Crawler(object):
         if commit_changes is True, the changes are synchronized to the CaosDB server.
         For debugging in can be useful to set this to False.
 
-        Return the final insertList and updateList as tuple.
+        Return the final to_be_inserted and to_be_updated as tuple.
         """
 
         if self.identifiableAdapter is None:
             raise RuntimeError("Should not happen.")
 
         to_be_inserted, to_be_updated = self.split_into_inserts_and_updates(
-            updateList)
+            target_data)
 
-        # remove unnecessary updates from list
         # TODO: refactoring of typo
         for el in to_be_updated:
-            self.replace_entities_by_ids(el)
-
-        identified_records = [self.identifiableAdapter.retrieve_identified_record_for_record(record) for record
-                              in to_be_updated]
+            # all entity objects are replaced by their IDs except for the not yet inserted ones
+            self.replace_entities_with_ids(el)
+
+        identified_records = [
+            self.identifiableAdapter.retrieve_identified_record_for_record(
+                record)
+            for record in to_be_updated]
+        # remove unnecessary updates from list by comparing the target records to the existing ones
         self.remove_unnecessary_updates(to_be_updated, identified_records)
 
         if commit_changes:
-            self.execute_inserts_in_list(to_be_inserted)
-            self.execute_updates_in_list(to_be_updated)
+            self.execute_parent_updates_in_list(to_be_updated, securityMode=self.securityMode,
+                                                run_id=self.run_id, unique_names=unique_names)
+            self.execute_inserts_in_list(
+                to_be_inserted, self.securityMode, self.run_id, unique_names=unique_names)
+            self.execute_updates_in_list(
+                to_be_updated, self.securityMode, self.run_id, unique_names=unique_names)
+
+        update_cache = UpdateCache()
+        pending_inserts = update_cache.get_inserts(self.run_id)
+        if pending_inserts:
+            Crawler.inform_about_pending_changes(
+                pending_inserts, self.run_id, self.crawled_directory)
+
+        pending_updates = update_cache.get_updates(self.run_id)
+        if pending_updates:
+            Crawler.inform_about_pending_changes(
+                pending_updates, self.run_id, self.crawled_directory)
 
         return (to_be_inserted, to_be_updated)
 
+    @staticmethod
+    def inform_about_pending_changes(pending_changes, run_id, path, inserts=False):
+        # Sending an Email with a link to a form to authorize updates is
+        # only done in SSS mode
+
+        if "SHARED_DIR" in os.environ:
+            filename = OldCrawler.save_form(
+                [el[3] for el in pending_changes], path, run_id)
+            OldCrawler.send_mail([el[3] for el in pending_changes], filename)
+
+        for i, el in enumerate(pending_changes):
+
+            logger.debug(
+                """
+UNAUTHORIZED UPDATE ({} of {}):
+____________________\n""".format(i + 1, len(pending_changes)) + str(el[3]))
+        logger.info("There were unauthorized changes (see above). An "
+                    "email was sent to the curator.\n"
+                    "You can authorize the " +
+                    ("inserts" if inserts else "updates")
+                    + " by invoking the crawler"
+                    " with the run id: {rid}\n".format(rid=run_id))
+
     @staticmethod
     def debug_build_usage_tree(converter: Converter):
-        res: dict[str, dict[str, Any]] = {
+        res: Dict[str, Dict[str, Any]] = {
             converter.name: {
                 "usage": ", ".join(converter.metadata["usage"]),
                 "subtree": {}
@@ -805,7 +986,7 @@ class Crawler(object):
         return res
 
     def save_debug_data(self, filename: str):
-        paths: dict[str, Union[dict, list]] = dict()
+        paths: Dict[str, Union[dict, list]] = dict()
 
         def flatten_debug_info(key):
             mod_info = self.debug_metadata[key]
@@ -830,17 +1011,16 @@ class Crawler(object):
         with open(filename, "w") as f:
             f.write(yaml.dump(paths, sort_keys=False))
 
-    def _crawl(self, items: list[StructureElement],
-               global_converters: list[Converter],
-               local_converters: list[Converter],
+    def _crawl(self, items: List[StructureElement],
+               local_converters: List[Converter],
                generalStore: GeneralStore,
                recordStore: RecordStore,
-               structure_elements_path: list[str], converters_path: list[str]):
+               structure_elements_path: List[str], converters_path: List[str]):
         """
         Crawl a list of StructureElements and apply any matching converters.
 
         items: structure_elements (e.g. files and folders on one level on the hierarchy)
-        global_converters and local_converters: globally or locally defined converters for
+        local_converters: locally defined converters for
                             treating structure elements. A locally defined converter could be
                             one that is only valid for a specific subtree of the originally
                             cralwed StructureElement structure.
@@ -848,7 +1028,8 @@ class Crawler(object):
                             global stores of the Crawler object.
         """
         for element in items:
-            for converter in global_converters + local_converters:
+            for converter in local_converters:
+
                 # type is something like "matches files", replace isinstance with "type_matches"
                 # match function tests regexp for example
                 if (converter.typecheck(element) and
@@ -874,7 +1055,8 @@ class Crawler(object):
                         self.debug_tree[str(element)] = (
                             generalStore_copy.get_storage(), recordStore_copy.get_storage())
                         self.debug_metadata["copied"][str(element)] = (
-                            generalStore_copy.get_dict_copied(), recordStore_copy.get_dict_copied())
+                            generalStore_copy.get_dict_copied(),
+                            recordStore_copy.get_dict_copied())
                         self.debug_metadata["usage"][str(element)].add(
                             "/".join(converters_path + [converter.name]))
                         mod_info = self.debug_metadata["provenance"]
@@ -885,10 +1067,11 @@ class Crawler(object):
                             record_identifier = record_name + \
                                 "_" + str(internal_id)
                             converter.metadata["usage"].add(record_identifier)
-                            mod_info[record_identifier][prop_name] = (structure_elements_path + [element.get_name()],
-                                                                      converters_path + [converter.name])
+                            mod_info[record_identifier][prop_name] = (
+                                structure_elements_path + [element.get_name()],
+                                converters_path + [converter.name])
 
-                    self._crawl(children, global_converters, converter.converters,
+                    self._crawl(children, converter.converters,
                                 generalStore_copy, recordStore_copy,
                                 structure_elements_path + [element.get_name()],
                                 converters_path + [converter.name])
@@ -897,7 +1080,7 @@ class Crawler(object):
         # to the general update container.
         scoped_records = recordStore.get_records_current_scope()
         for record in scoped_records:
-            self.updateList.append(record)
+            self.target_data.append(record)
 
         # TODO: the scoped variables should be cleaned up as soon if the variables
         #       are no longer in the current scope. This can be implemented as follows,
@@ -910,37 +1093,59 @@ class Crawler(object):
         #     del recordStore[name]
         #     del generalStore[name]
 
-        return self.updateList
+        return self.target_data
 
 
-def crawler_main(args_path,
-                 args_cfood,
-                 args_load_identifiables,
-                 args_debug,
-                 args_provenance,
-                 args_dry_sync,
-                 args_sync,
-                 args_prefix):
-    crawler = Crawler(debug=args_debug)
-    crawler.crawl_directory(args_path, args_cfood)
-    if args_provenance is not None:
-        crawler.save_debug_data(args_provenance)
+def crawler_main(crawled_directory_path: str,
+                 cfood_file_name: str,
+                 identifiables_definition_file: str = None,
+                 debug: bool = False,
+                 provenance_file: str = None,
+                 dry_run: bool = False,
+                 prefix: str = "",
+                 securityMode: int = SecurityMode.UPDATE,
+                 unique_names=True,
+                 ):
+    """
+
+    Parameters
+    ----------
+    crawled_directory_path : str
+        path to be crawled
+    cfood_file_name : str
+        filename of the cfood to be used
+    identifiables_definition_file : str
+        filename of an identifiable definition yaml file
+    debug : bool
+        whether or not to run in debug mode
+    provenance_file : str
+        provenance information will be stored in a file with given filename
+    dry_run : bool
+        do not commit any changes to the server
+    prefix : str
+        remove the given prefix from file paths
+    securityMode : int
+        securityMode of Crawler
+    unique_names : bool
+        whether or not to update or insert entities in spite of name conflicts
+
+    Returns
+    -------
+    return_value : int
+        0 if successful
+    """
+    crawler = Crawler(debug=debug, securityMode=securityMode)
+    crawler.crawl_directory(crawled_directory_path, cfood_file_name)
+    if provenance_file is not None:
+        crawler.save_debug_data(provenance_file)
 
-    if args_load_identifiables is not None:
+    if identifiables_definition_file is not None:
 
         ident = CaosDBIdentifiableAdapter()
-        ident.load_from_yaml_definition(args_load_identifiables)
+        ident.load_from_yaml_definition(identifiables_definition_file)
         crawler.identifiableAdapter = ident
 
-        # TODO: What is the purpose of the following code:
-        # for k, v in identifiable_data.items():
-        #     rt = db.RecordType()
-        #     rt.add_parent(k)
-        #     for pn in v:
-        #         rt.add_property(name=pn)
-        #     ident.register_identifiable(k, rt)
-            
-    if args_dry_sync:
+    if dry_run:
         ins, upd = crawler.synchronize(commit_changes=False)
         inserts = [convert_to_python_object(i).serialize() for i in ins]
         updates = [convert_to_python_object(i).serialize() for i in upd]
@@ -948,14 +1153,17 @@ def crawler_main(args_path,
             f.write(yaml.dump({
                 "insert": inserts,
                 "update": updates}))
-    elif args_sync:
+    else:
         rtsfinder = dict()
-        for elem in crawler.updateList:
+        for elem in crawler.target_data:
             if isinstance(elem, db.File):
                 # correct the file path:
                 # elem.file = os.path.join(args.path, elem.file)
-                if elem.path.startswith(args_prefix):
-                    elem.path = elem.path[len(args_prefix):]
+                if prefix is None:
+                    raise RuntimeError(
+                        "No prefix set. Prefix must be set if files are used.")
+                if elem.path.startswith(prefix):
+                    elem.path = elem.path[len(prefix):]
                 elem.file = None
                 # TODO: as long as the new file backend is not finished
                 #       we are using the loadFiles function to insert symlinks.
@@ -980,36 +1188,36 @@ def crawler_main(args_path,
             raise RuntimeError("Missing RecordTypes: {}".
                                format(", ".join(notfound)))
 
-        crawler.synchronize(commit_changes=True)
+        crawler.synchronize(commit_changes=True, unique_names=unique_names)
     return 0
 
 
 def parse_args():
     parser = argparse.ArgumentParser(description=__doc__,
                                      formatter_class=RawTextHelpFormatter)
-    parser.add_argument("cfood",
+    parser.add_argument("cfood_file_name",
                         help="Path name of the cfood yaml file to be used.")
     parser.add_argument("--provenance", required=False,
                         help="Path name of the provenance yaml file. "
                         "This file will only be generated if this option is set.")
     parser.add_argument("--debug", required=False, action="store_true",
                         help="Path name of the cfood yaml file to be used.")
-    parser.add_argument("path",
+    parser.add_argument("crawled_directory_path",
                         help="The subtree of files below the given path will "
                         "be considered. Use '/' for everything.")
-
-    parser.add_argument("-n", "--dry-sync", action="store_true",
+    parser.add_argument("-s", "--security-mode", choices=["retrieve", "insert", "update"],
+                        default="retrieve",
+                        help="Determines whether entities may only be read from the server, or "
+                        "whether inserts or even updates may be done.")
+    parser.add_argument("-n", "--dry-run", action="store_true",
                         help="Create two files dry.yml to show"
                         "what would actually be committed without doing the synchronization.")
 
     # TODO: load identifiables is a dirty implementation currently
     parser.add_argument("-i", "--load-identifiables",
-                        help="Load identifiables from "
-                        "the given yaml file.")
-    parser.add_argument("-s", "--sync", action="store_true",
-                        help="Do the synchronization. This is probably the expected "
-                        "standard behavior of the crawler.")
-
+                        help="Load identifiables from the given yaml file.")
+    parser.add_argument("-u", "--unique-names",
+                        help="Insert or updates entities even if name conflicts exist.")
     parser.add_argument("-p", "--prefix",
                         help="Remove the given prefix from the paths "
                         "of all file objects.")
@@ -1019,17 +1227,31 @@ def parse_args():
 
 def main():
     args = parse_args()
-    return crawler_main(
-        args.path,
-        args.cfood,
-        args.load_identifiables,
-        args.debug,
-        args.provenance,
-        args.dry_sync,
-        args.sync,
-        args.prefix
-    )
+
+    conlogger = logging.getLogger("connection")
+    conlogger.setLevel(level=logging.ERROR)
+
+    # logging config for local execution
+    logger.addHandler(logging.StreamHandler(sys.stdout))
+    if args.debug:
+        logger.setLevel(logging.DEBUG)
+    else:
+        logger.setLevel(logging.INFO)
+
+    sys.exit(crawler_main(
+        crawled_directory_path=args.crawled_directory_path,
+        cfood_file_name=args.cfood_file_name,
+        identifiables_definition_file=args.load_identifiables,
+        debug=args.debug,
+        provenance_file=args.provenance,
+        dry_run=args.dry_run,
+        prefix=args.prefix,
+        securityMode={"retrieve": SecurityMode.RETRIEVE,
+                      "insert": SecurityMode.INSERT,
+                      "update": SecurityMode.UPDATE}[args.security_mode],
+        unique_names=args.unique_names,
+    ))
 
 
 if __name__ == "__main__":
-    sys.exit(main())
+    main()
diff --git a/src/caoscrawler/identifiable_adapters.py b/src/caoscrawler/identifiable_adapters.py
index 47fd5324a4803c67d7c9f99448378e7b5f9241bd..d4c2b1d04316946dc28fec15489e0dc390cb9dd3 100644
--- a/src/caoscrawler/identifiable_adapters.py
+++ b/src/caoscrawler/identifiable_adapters.py
@@ -27,8 +27,10 @@ import yaml
 
 from datetime import datetime
 import caosdb as db
+import logging
 from abc import abstractmethod, ABCMeta
 from .utils import has_parent
+logger = logging.getLogger(__name__)
 
 
 def convert_value(value):
@@ -50,8 +52,11 @@ def convert_value(value):
         return str(value.id)
     elif isinstance(value, datetime):
         return value.isoformat()
+    elif type(value) == str:
+        # replace single quotes, otherwise they may break the queries
+        return value.replace("\'", "\\'")
     else:
-        return str(value)
+        return f"{value}"
 
 
 class IdentifiableAdapter(metaclass=ABCMeta):
@@ -202,7 +207,9 @@ class IdentifiableAdapter(metaclass=ABCMeta):
             if record_prop is None:
                 # TODO: how to handle missing values in identifiables
                 #       raise an exception?
-                raise NotImplementedError()
+                raise NotImplementedError(
+                    f"RECORD\n{record}\nPROPERTY\n{prop.name}"
+                )
             newval = record_prop.value
             if isinstance(record_prop.value, db.Entity):
                 newval = self.resolve_reference(record_prop.value)
@@ -245,6 +252,7 @@ class IdentifiableAdapter(metaclass=ABCMeta):
         pass
 
     # TODO: remove side effect
+    # TODO: use ID if record has one?
     def retrieve_identified_record_for_record(self, record: db.Record):
         """
         This function combines all functionality of the IdentifierAdapter by
@@ -382,7 +390,8 @@ class LocalStorageIdentifiableAdapter(IdentifiableAdapter):
             if self.check_record(record, identifiable):
                 candidates.append(record)
         if len(candidates) > 1:
-            raise RuntimeError("Identifiable was not defined unambigiously.")
+            raise RuntimeError(
+                f"Identifiable was not defined unambigiously. Possible candidates are {candidates}")
         if len(candidates) == 0:
             return None
         return candidates[0]
@@ -464,7 +473,8 @@ class CaosDBIdentifiableAdapter(IdentifiableAdapter):
         query_string = self.create_query_for_identifiable(identifiable)
         candidates = db.execute_query(query_string)
         if len(candidates) > 1:
-            raise RuntimeError("Identifiable was not defined unambigiously.")
+            raise RuntimeError(
+                f"Identifiable was not defined unambigiously.\n{query_string}\nReturned the following {candidates}.")
         if len(candidates) == 0:
             return None
         return candidates[0]
diff --git a/src/caoscrawler/macros/__init__.py b/src/caoscrawler/macros/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..0acfb1763039a3bb800bbf0e26d6940b49d045cf
--- /dev/null
+++ b/src/caoscrawler/macros/__init__.py
@@ -0,0 +1 @@
+from .macro_yaml_object import defmacro_constructor, macro_constructor
diff --git a/src/caoscrawler/macros/macro_yaml_object.py b/src/caoscrawler/macros/macro_yaml_object.py
new file mode 100644
index 0000000000000000000000000000000000000000..2849986e6deb5cb2cba9e45516e6ce8e1a93dfa0
--- /dev/null
+++ b/src/caoscrawler/macros/macro_yaml_object.py
@@ -0,0 +1,155 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# ** header v3.0
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2022 Alexander Schlemmer
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ** end header
+#
+
+# Function to expand a macro in yaml
+# A. Schlemmer, 05/2022
+
+from dataclasses import dataclass
+from typing import Any, Dict
+from copy import deepcopy
+from string import Template
+
+
+@dataclass
+class MacroDefinition:
+    """
+    Stores a macro definition.
+    name: Name of the macro
+    params: variables and default values to be substituted in keys or values
+    definition: A dictionary that will be substituted including parameters
+    """
+    name: str
+    params: Dict[str, Any]
+    definition: Any
+
+
+# This dictionary stores the macro definitions
+macro_store: Dict[str, MacroDefinition] = dict()
+
+
+def substitute(propvalue, values: dict):
+    """
+    Substitution of variables in strings using the variable substitution
+    library from python's standard library.
+    """
+    propvalue_template = Template(propvalue)
+    return propvalue_template.safe_substitute(**values)
+
+
+def substitute_dict(sourced: Dict[str, Any], values: Dict[str, Any]):
+    """
+    Create a copy of sourced.
+    Afterwards recursively do variable substitution on all keys and values.
+    """
+    d = deepcopy(sourced)
+    # Changes in keys:
+    replace: Dict[str, str] = dict()
+    for k in d:
+        replacement = substitute(k, values)
+        if replacement != k:
+            replace[k] = replacement
+    for k, v in replace.items():
+        d[v] = d[k]
+        del d[k]
+    # Changes in values:
+    for k, v in d.items():
+        if isinstance(v, str):
+            d[k] = substitute(v, values)
+        elif isinstance(v, list):
+            subst_list = list()
+            for i in d[k]:
+                if isinstance(i, str):
+                    subst_list.append(substitute(i, values))
+                elif isinstance(i, dict):
+                    subst_list.append(substitute_dict(i, values))
+                else:
+                    subst_list.append(i)
+            d[k] = subst_list
+        elif isinstance(v, dict):
+            d[k] = substitute_dict(v, values)
+        else:
+            pass
+    return d
+
+
+def defmacro_constructor(loader, node):
+    """
+    Function for registering macros in yaml files.
+
+    It can be registered in pyaml using:
+    yaml.SafeLoader.add_constructor("!defmacro", defmacro_constructor)
+    """
+
+    value = loader.construct_mapping(node, deep=True)
+    params = {}
+    if "params" in value:
+        params = value["params"]
+    macro = MacroDefinition(
+        value["name"], params,
+        value["definition"])
+    macro_store[macro.name] = macro
+    return {}
+
+
+def macro_constructor(loader, node):
+    """
+    Function for substituting macros in yaml files.
+
+    It can be registered in pyaml using:
+    yaml.SafeLoader.add_constructor("!macro", macro_constructor)
+    """
+    res = dict()
+    value = loader.construct_mapping(node, deep=True)
+    for name, params_setter in value.items():
+        if name in macro_store:
+            # If params_setter is a list, run this for every element:
+            if params_setter is not None and isinstance(params_setter, list):
+                for el in params_setter:
+                    macro = macro_store[name]
+                    params = deepcopy(macro.params)
+                    if el is not None:
+                        if isinstance(el, dict):
+                            params.update(el)
+                        else:
+                            raise RuntimeError("params type not supported")
+                    else:
+                        raise RuntimeError("params type must not be None")
+                    definition = substitute_dict(macro.definition, params)
+                    res.update(definition)
+            else:
+                # This is just a single macro:
+                macro = macro_store[name]
+                params = deepcopy(macro.params)
+                if params_setter is not None:
+                    if isinstance(params_setter, dict):
+                        params.update(params_setter)
+                    else:
+                        raise RuntimeError("params type not supported")
+                definition = substitute_dict(macro.definition, params)
+                res.update(definition)
+        else:
+            # If there is no macro with that name, just keep that node:
+            res[name] = params_setter
+
+    return res
diff --git a/src/caoscrawler/structure_elements.py b/src/caoscrawler/structure_elements.py
index 6be653a4758e8c3fb789b22ea655836a3d976c34..01996b4ff3e14a9739857e6e03ceca161300b37e 100644
--- a/src/caoscrawler/structure_elements.py
+++ b/src/caoscrawler/structure_elements.py
@@ -23,12 +23,15 @@
 # ** end header
 #
 
+from typing import Dict
+
+
 class StructureElement(object):
     """ base class for elements in the hierarchical data structure """
 
     def __init__(self, name):
         # Used to store usage information for debugging:
-        self.metadata: dict[str, set[str]] = {
+        self.metadata: Dict[str, set[str]] = {
             "usage": set()
         }
 
diff --git a/src/doc/README_SETUP.md b/src/doc/README_SETUP.md
new file mode 100644
index 0000000000000000000000000000000000000000..b6995c9a2d950ecd1e832d5b49dac9ed88a7e455
--- /dev/null
+++ b/src/doc/README_SETUP.md
@@ -0,0 +1,82 @@
+# Getting started with the CaosDB Crawler #
+
+## Installation ##
+
+### Requirements ###
+
+
+### How to install ###
+
+#### Linux ####
+
+Make sure that Python (at least version 3.8) and pip are installed, using your system tools and
+documentation.
+
+Then open a terminal and continue in the [Generic installation](#generic-installation) section.
+
+#### Windows ####
+
+If a Python distribution is not yet installed, we recommend Anaconda Python, which you can download
+for free from [https://www.anaconda.com](https://www.anaconda.com).  The "Anaconda Individual Edition" provides almost all of the
+packages you will ever need out of the box.  If you prefer, you may also install the leaner
+"Miniconda" installer, which allows you to install packages as you need them.
+
+After installation, open an Anaconda prompt from the Windows menu and continue in the [Generic
+installation](#generic-installation) section.
+
+#### MacOS ####
+
+If there is no Python 3 installed yet, there are two main ways to
+obtain it: Either get the binary package from
+[python.org](https://www.python.org/downloads/) or, for advanced
+users, install via [Homebrew](https://brew.sh/). After installation
+from python.org, it is recommended to also update the TLS certificates
+for Python (this requires administrator rights for your user):
+
+```sh
+# Replace this with your Python version number:
+cd /Applications/Python\ 3.9/
+
+# This needs administrator rights:
+sudo ./Install\ Certificates.command
+```
+
+After these steps, you may continue with the [Generic
+installation](#generic-installation).
+
+#### Generic installation ####
+
+---
+
+Obtain the sources from GitLab and install from there (`git` must be installed for
+this option):
+
+```sh
+git clone https://gitlab.com/caosdb/caosdb-crawler
+cd caosdb-crawler
+pip3 install --user .
+```
+
+**Note**: In the near future, this package will also be made available on PyPi.
+
+## Configuration ##
+
+
+
+## Try it out ##
+
+
+
+## Run Unit Tests
+
+## Documentation ##
+
+Build documentation in `src/doc` with `make html`.
+
+### Requirements ###
+
+- `sphinx`
+- `sphinx-autoapi`
+- `recommonmark`
+
+### Troubleshooting ###
diff --git a/src/doc/_apidoc/modules.rst b/src/doc/_apidoc/modules.rst
deleted file mode 100644
index 17f187982981ffbf7bcc857056d10644c2bd422b..0000000000000000000000000000000000000000
--- a/src/doc/_apidoc/modules.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-newcrawler
-==========
-
-.. toctree::
-   :maxdepth: 4
-
-   newcrawler
diff --git a/src/doc/_apidoc/newcrawler.converters.rst b/src/doc/_apidoc/newcrawler.converters.rst
deleted file mode 100644
index 893391c229b94baeed9a44c57877ed33f37b2f5e..0000000000000000000000000000000000000000
--- a/src/doc/_apidoc/newcrawler.converters.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-newcrawler.converters module
-============================
-
-.. automodule:: newcrawler.converters
-   :members:
-   :undoc-members:
-   :show-inheritance:
diff --git a/src/doc/_apidoc/newcrawler.crawl.rst b/src/doc/_apidoc/newcrawler.crawl.rst
deleted file mode 100644
index b00a6ab6498a0482cea3e9faa54d66d66991dc2d..0000000000000000000000000000000000000000
--- a/src/doc/_apidoc/newcrawler.crawl.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-newcrawler.crawl module
-=======================
-
-.. automodule:: newcrawler.crawl
-   :members:
-   :undoc-members:
-   :show-inheritance:
diff --git a/src/doc/_apidoc/newcrawler.identifiable_adapters.rst b/src/doc/_apidoc/newcrawler.identifiable_adapters.rst
deleted file mode 100644
index d8926f41b72d2c54931f045d75f9fe59b21e6076..0000000000000000000000000000000000000000
--- a/src/doc/_apidoc/newcrawler.identifiable_adapters.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-newcrawler.identifiable\_adapters module
-========================================
-
-.. automodule:: newcrawler.identifiable_adapters
-   :members:
-   :undoc-members:
-   :show-inheritance:
diff --git a/src/doc/_apidoc/newcrawler.identified_cache.rst b/src/doc/_apidoc/newcrawler.identified_cache.rst
deleted file mode 100644
index 6f697362ad44d1fec01f328550dc8667cc889019..0000000000000000000000000000000000000000
--- a/src/doc/_apidoc/newcrawler.identified_cache.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-newcrawler.identified\_cache module
-===================================
-
-.. automodule:: newcrawler.identified_cache
-   :members:
-   :undoc-members:
-   :show-inheritance:
diff --git a/src/doc/_apidoc/newcrawler.rst b/src/doc/_apidoc/newcrawler.rst
deleted file mode 100644
index 202444a5efbde248e52d712575ade49f6dd50601..0000000000000000000000000000000000000000
--- a/src/doc/_apidoc/newcrawler.rst
+++ /dev/null
@@ -1,24 +0,0 @@
-newcrawler package
-==================
-
-Submodules
-----------
-
-.. toctree::
-   :maxdepth: 4
-
-   newcrawler.converters
-   newcrawler.crawl
-   newcrawler.identifiable_adapters
-   newcrawler.identified_cache
-   newcrawler.stores
-   newcrawler.structure_elements
-   newcrawler.utils
-
-Module contents
----------------
-
-.. automodule:: newcrawler
-   :members:
-   :undoc-members:
-   :show-inheritance:
diff --git a/src/doc/_apidoc/newcrawler.stores.rst b/src/doc/_apidoc/newcrawler.stores.rst
deleted file mode 100644
index 7d446c1cd45a6bf1c4b6cf1b1d33e9a2a5ad9751..0000000000000000000000000000000000000000
--- a/src/doc/_apidoc/newcrawler.stores.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-newcrawler.stores module
-========================
-
-.. automodule:: newcrawler.stores
-   :members:
-   :undoc-members:
-   :show-inheritance:
diff --git a/src/doc/_apidoc/newcrawler.structure_elements.rst b/src/doc/_apidoc/newcrawler.structure_elements.rst
deleted file mode 100644
index 4613e1d58b0ef9c7cc38096aa25270f469836ce5..0000000000000000000000000000000000000000
--- a/src/doc/_apidoc/newcrawler.structure_elements.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-newcrawler.structure\_elements module
-=====================================
-
-.. automodule:: newcrawler.structure_elements
-   :members:
-   :undoc-members:
-   :show-inheritance:
diff --git a/src/doc/_apidoc/newcrawler.utils.rst b/src/doc/_apidoc/newcrawler.utils.rst
deleted file mode 100644
index 4df55a234fd85072068e41d1ce7bb3b17fd1a698..0000000000000000000000000000000000000000
--- a/src/doc/_apidoc/newcrawler.utils.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-newcrawler.utils module
-=======================
-
-.. automodule:: newcrawler.utils
-   :members:
-   :undoc-members:
-   :show-inheritance:
diff --git a/src/doc/cfood.rst b/src/doc/cfood.rst
new file mode 100644
index 0000000000000000000000000000000000000000..677cadc55709c6c25d16ff547b311102ee78699a
--- /dev/null
+++ b/src/doc/cfood.rst
@@ -0,0 +1,149 @@
+CFood-Definition
+================
+
+The crawler specification is called CFood-definition. It is stored inside a yaml file, or - more precisely - inside of one single or two yaml documents inside a yaml file.
+
+The specification consists of three separate parts:
+#. Metadata and macro definitions
+#. Custom converter registrations
+#. The converter tree specification
+
+In the simplest case, there is just one yaml file with just a single document including at least
+the converter tree specification (see :ref:`example 1<example_1>`). Additionally the custom converter part may be also included in
+this single document (for historical reasons, see :ref:`example 2<example_2>`), but it is recommended to include them in the separate
+document together with the metadata and :doc:`macro<macros>` definitions (see :ref:`below<example_4>`).
+
+If metadata and macro definitions are provided, there **must** be a second document preceding the
+converter tree specification, including these definitions.
+
+Examples
+++++++++
+
+A single document with a converter tree specification:
+
+.. _example_1:
+.. code-block:: yaml
+                
+   extroot:
+     type: Directory
+     match: ^extroot$
+     subtree:
+       DataAnalysis:
+         type: Directory
+         match: DataAnalysis
+         # (...)
+
+         
+A single document with a converter tree specification, but also including a custom converters section:
+
+.. _example_2:
+.. code-block:: yaml
+
+   Converters:
+     CustomConverter_1:
+       package: mypackage.converters
+       converter: CustomConverter1
+     CustomConverter_2:
+       package: mypackage.converters
+       converter: CustomConverter2
+                
+   extroot:
+     type: Directory
+     match: ^extroot$
+     subtree:
+       DataAnalysis:
+         type: Directory
+         match: DataAnalysis
+         # (...)
+
+
+
+A yaml multi-document, defining metadata and some macros in the first document and declaring
+two custom converters in the second document (**not recommended**, see the recommended version :ref:`below<example_4>`). Please note, that two separate yaml documents can be defined using the ``---`` syntax:
+
+
+.. _example_3:
+.. code-block:: yaml
+
+   ---
+   metadata:
+     name: Datascience CFood
+     description: CFood for data from the local data science work group
+     macros:
+     - !defmacro
+       name: SimulationDatasetFile
+       params:
+         match: null
+         recordtype: null
+         nodename: null
+       definition:
+         # (...)
+   ---
+   Converters:
+     CustomConverter_1:
+       package: mypackage.converters
+       converter: CustomConverter1
+     CustomConverter_2:
+       package: mypackage.converters
+       converter: CustomConverter2
+                
+   extroot:
+     type: Directory
+     match: ^extroot$
+     subtree:
+       DataAnalysis:
+         type: Directory
+         match: DataAnalysis
+         # (...)
+
+
+
+The **recommended way** of defining metadata, custom converters, macros and the main cfood specification is shown in the following code example:
+
+
+.. _example_4:
+.. code-block:: yaml
+
+   ---
+   metadata:
+     name: Datascience CFood
+     description: CFood for data from the local data science work group
+     macros:
+     - !defmacro
+       name: SimulationDatasetFile
+       params:
+         match: null
+         recordtype: null
+         nodename: null
+       definition:
+         # (...)
+     Converters:
+       CustomConverter_1:
+         package: mypackage.converters
+         converter: CustomConverter1
+       CustomConverter_2:
+         package: mypackage.converters
+         converter: CustomConverter2
+   ---
+   extroot:
+     type: Directory
+     match: ^extroot$
+     subtree:
+       DataAnalysis:
+         type: Directory
+         match: DataAnalysis
+         # (...)
+
+
+List Mode
+---------
+
+Specifying values of properties can make use of two special characters, in order to automatically
+create lists or multi properties instead of single values:
+
+.. code-block:: yaml
+                
+        Experiment1:
+            Measurement: +Measurement <- Element in List (list is cleared before run)
+                         *Measurement <- Multi Property (properties are removed before run)
+                         Measurement  <- Overwrite
diff --git a/src/doc/concepts.rst b/src/doc/concepts.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c0f21cbaa322caddabed8e045f7b6fc4253d2959
--- /dev/null
+++ b/src/doc/concepts.rst
@@ -0,0 +1,119 @@
+Concepts
+))))))))
+
+Structure Elements
+++++++++++++++++++
+
+This hierarchical structure is assumed to be constituted of a tree of
+StructureElements. The tree is created on the fly by so called Converters which
+are defined in a yaml file. The tree of StructureElements is a model
+of the existing data (for example, a tree of Python file objects
+(StructureElements) could represent a file tree that exists on some file server).
+
+Relevant sources in:
+src/structure_elements.py
+
+Converters
+++++++++++
+
+Converters treat StructureElements and thereby create the StructureElements that
+are the children of the treated StructureElement. Converters therefore create
+the above named tree. The definition of a Converter also contains what
+Converters shall be used to treat the generated child-StructureElements. The
+definition is therefore a tree itself.
+
+See :doc:`Converters <converters>` for details.
+
+
+
+Relevant sources in:
+src/converters.py
+
+
+
+Identifiables
++++++++++++++
+
+Relevant sources in:
+src/identifiable_adapters.py
+
+The Crawler
++++++++++++
+
+The crawler can be considered the main program doing the synchronization in basically two steps:
+#. Based on a yaml-specification scan the file system (or other sources) and create a set of CaosDB Entities that are supposed to be inserted or updated in a CaosDB instance.
+#. Compare the current state of the CaosDB instance with the set of CaosDB Entities created in step 1, taking into account the :ref:`registered identifiables<Identifiables>`. Insert or update entities accordingly.
+
+Relevant sources in:
+src/crawl.py
+
+
+
+Special Cases
+=============
+
+Variable Precedence
++++++++++++++++++++
+
+Let's assume the following situation
+
+.. code-block:: yaml
+                
+  description:
+    type: DictTextElement
+    match_value: (?P<description>.*)
+    match_name: description
+
+
+Making use of the $description variable could refer to two different variables created here:
+1. The structure element path.
+2. The value of the matched expression.
+
+The matched expression does take precedence over the structure element path and shadows it.
+
+Make sure, that if you want to be able to use the structure element path, to give unique names
+to the variables like:
+
+.. code-block:: yaml
+                
+  description_text_block:
+    type: DictTextElement
+    match_value: (?P<description>.*)
+    match_name: description
+
+
+Scopes
+========
+
+Example:
+
+.. code-block:: yaml
+                
+  DicomFile:
+    type: SimpleDicomFile
+    match: (?P<filename>.*)\.dicom
+    records:
+      DicomRecord:
+        name: $filename
+    subtree:  # header of dicom file
+      PatientID:
+        type: DicomHeaderElement
+        match_name: PatientName
+        match_value: (?P<patient>.*)
+        records:
+          Patient:
+            name: $patient
+            dicom_name: $filename  # $filename is in same scope!
+  ExperimentFile:
+    type: MarkdownFile
+    match: ^readme.md$
+    records:
+      Experiment:
+        dicom_name: $filename  # does NOT work, because $filename is out of scope!
+
+
+# can variables be used within regexp?
+
+
+File Objects
+============
diff --git a/src/doc/conf.py b/src/doc/conf.py
index fb37cdd96c440300741aeb49e90caffe4370f5d7..30ce670eb8685e9701eeeb59bf22451a21fb16b9 100644
--- a/src/doc/conf.py
+++ b/src/doc/conf.py
@@ -53,6 +53,7 @@ extensions = [
     'sphinx.ext.autosectionlabel',
     'sphinx.ext.intersphinx',
     'sphinx.ext.napoleon',     # For Google style docstrings
+    "recommonmark",            # For markdown files.
     "sphinx_rtd_theme",
 ]
 
@@ -61,7 +62,7 @@ templates_path = ['_templates']
 
 # The suffix(es) of source filenames.
 # You can specify multiple suffix as a list of string:
-source_suffix = ['.rst']
+source_suffix = ['.rst', '.md']
 
 # The master toctree document.
 master_doc = 'index'
@@ -71,7 +72,7 @@ master_doc = 'index'
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
-language = None
+language = "en"
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
@@ -99,7 +100,7 @@ html_theme = "sphinx_rtd_theme"
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = []  # ['_static']
 
 # Custom sidebar templates, must be a dictionary that maps document names
 # to template names.
diff --git a/src/doc/converters.rst b/src/doc/converters.rst
new file mode 100644
index 0000000000000000000000000000000000000000..7ec93535ec41dc211e2fa7ee194b2ecbe1a659fb
--- /dev/null
+++ b/src/doc/converters.rst
@@ -0,0 +1,309 @@
+Converters
+))))))))))
+
+Converters treat StructureElements and thereby create the StructureElements that
+are the children of the treated StructureElement. Converters therefore create
+the tree of structure elements. The definition of a Converter also contains what
+Converters shall be used to treat the generated child-StructureElements. The
+definition is therefore a tree itself.
+
+Each StructureElement in the tree has a set of data values, i.e a dictionary of
+key value pairs.
+Some of those values are set due to the kind of StructureElement. For example,
+a file could have the file name as such a key value pair: 'filename': <sth>.
+Converters may define additional functions that create further values. For
+example, a regular expression could be used to get a date from a file name.
+
+
+
+
+A converter is defined via a yml file or part of it. The definition states
+what kind of StructureElement it treats (typically one).
+Also, it defines how children of the current StructureElement are
+created and what Converters shall be used to treat those.
+
+The yaml definition looks like the following:
+
+TODO: outdated, see cfood-schema.yml
+
+.. code-block:: yaml
+                
+    <NodeName>:
+        type: <ConverterName>
+        match: ".*"
+        records:
+            Experiment1:
+                parents:
+                - Experiment
+                - Blablabla
+                date: $DATUM
+                (...)
+            Experiment2:
+                parents:
+                - Experiment
+        subtree:
+            (...)
+     
+The **<NodeName>** is a description of what it represents (e.g.
+'experiment-folder') and is used as identifier.
+
+**<type>** selects the converter that is going to be matched against the current structure
+element. If the structure element matches (this is a combination of a typecheck and a detailed
+match, see :py:class:`~caoscrawler.converters.Converter` for details) the converter is used
+to generate records (see :py:meth:`~caoscrawler.converters.Converter.create_records`) and to possibly process a subtree, as defined by the function :func:`caoscrawler.converters.create_children`.
+
+**records** is a dict of definitions that define the semantic structure
+(see details below).
+
+Subtree contains a list of Converter definitions that look like the one
+described here.
+
+
+Standard Converters
++++++++++++++++++++
+
+Directory Converter
+===================
+
+Simple File Converter
+=====================
+
+Markdown File Converter
+=======================
+
+Dict Converter
+==============
+
+Typical Subtree converters
+--------------------------
+
+DictBooleanElementConverter
+DictFloatElementConverter
+DictTextElementConverter
+DictIntegerElementConverter
+DictListElementConverter
+DictDictElementConverter
+
+YAMLFileConverter
+=================
+
+A specialized Dict Converter for yaml files: Yaml files are opened and the contents are
+converted into dictionaries that can be further converted using the typical subtree converters
+of dict converter.
+
+**WARNING**: Currently unfinished implementation.
+
+JSONFileConverter
+=================
+
+
+
+TextElementConverter
+====================
+
+TableConverter
+==============
+
+A generic converter (abstract) for files containing tables.
+Currently, there are two specialized implementations for xlsx-files and csv-files.
+
+All table converters generate a subtree that can be converted with DictDictElementConverters:
+For each row in the table a DictDictElement (structure element) is generated. The key of the
+element is the row number. The value of the element is a dict containing the mapping of
+column names to values of the respective cell.
+
+Example:
+
+.. code-block:: yaml
+                
+   subtree:
+     TABLE:
+       type: CSVTableConverter
+       match: ^test_table.csv$
+       records:
+         (...)  # Records edited for the whole table file
+       subtree:
+         ROW:
+           type: DictDictElement
+           match_name: .*
+           match_value: .*
+           records:
+             (...)  # Records edited for each row
+           subtree:
+             COLUMN:
+               type: DictFloatElement
+               match_name: measurement  # Name of the column in the table file
+               match_value: (?P<column_value>.*)
+               records:
+                 (...)  # Records edited for each cell
+
+
+XLSXTableConverter
+==================
+
+CSVTableConverter
+=================
+
+Custom Converters
++++++++++++++++++
+
+It was previously mentioned that it is possible to create custom converters.
+These custom converters can be used to integrate arbitrary data extraction and ETL capabilities
+into the caosdb-crawler and make these extensions available to any yaml specification.
+
+The basic syntax for adding a custom converter to a yaml cfood definition file is:
+
+.. code-block:: yaml
+
+   Converters:
+     <NameOfTheConverterInYamlFile>:
+       package: <python>.<module>.<name>
+       converter: <PythonClassName>
+
+The Converters-section can be either put into the first or second document of the cfood yaml file.
+It can be also part of a single-document yaml cfood file. Please refer to :doc:`the cfood documentation<cfood>` for more details.
+
+Details:
+
+- **<NameOfTheConverterInYamlFile>**: This is the name of the converter as it is going to be used in the present yaml file.
+- **<python>.<module>.<name>**: The name of the module where the converter class resides.
+- **<PythonClassName>**: Within this specified module there must be a class inheriting from base class :py:class:`caoscrawler.converters.Converter`.
+
+The following methods are abstract and need to be overwritten by your custom converter to make it work:
+
+- :py:meth:`~caoscrawler.converters.Converter.create_children`
+- :py:meth:`~caoscrawler.converters.Converter.match`
+- :py:meth:`~caoscrawler.converters.Converter.typecheck`
+
+  
+Example
+=======
+  
+In the following, we will explain the process of adding a custom converter to a yaml file using
+a SourceResolver that is able to attach a source element to another entity.
+
+**Note**: This example might become a standard crawler soon, as part of the scifolder specification. See https://doi.org/10.3390/data5020043 for details. In this documentation example we will, therefore, add it to a package called "scifolder".
+
+First we will create our package and module structure, which might be:
+
+.. code-block::
+
+   scifolder_package/
+     README.md
+     setup.cfg
+     setup.py
+     Makefile
+     tox.ini
+     src/
+       scifolder/
+         __init__.py
+         converters/
+           __init__.py
+           sources.py  # <- the actual file containing
+                       #    the converter class
+     doc/
+     unittests/
+
+Now we need to create a class called "SourceResolver" in the file "sources.py". In this - more advanced - example, we will not inherit our converter directly from :py:class:`~caoscrawler.converters.Converter`, but use :py:class:`~caoscrawler.converters.TextElementConverter`. The latter already implements :py:meth:`~caoscrawler.converters.Converter.match` and :py:meth:`~caoscrawler.converters.Converter.typecheck`, so only an implementation for :py:meth:`~caoscrawler.converters.Converter.create_children` has to be provided by us.
+Furthermore we will customize the method :py:meth:`~caoscrawler.converters.Converter.create_records` that allows us to specify a more complex record generation procedure than provided in the standard implementation. One specific limitation of the standard implementation is, that only a fixed
+number of records can be generated by the yaml definition. So for any applications - like here - that require an arbitrary number of records to be created, a customized implementation of :py:meth:`~caoscrawler.converters.Converter.create_records` is recommended.
+In this context it is recommended to make use of the function :func:`caoscrawler.converters.create_records` that implements creation of record objects from python dictionaries of the same structure
+that would be given using a yaml definition.
+     
+.. code-block:: python
+
+    import re
+    from caoscrawler.stores import GeneralStore, RecordStore
+    from caoscrawler.converters import TextElementConverter, create_records
+    from caoscrawler.structure_elements import StructureElement, TextElement
+    
+
+    class SourceResolver(TextElementConverter):
+      """
+      This resolver uses a source list element (e.g. from the markdown readme file)
+      to link sources correctly.
+      """
+       
+      def __init__(self, definition: dict, name: str,
+                   converter_registry: dict):
+          """
+          Initialize a new directory converter.
+          """
+          super().__init__(definition, name, converter_registry)
+       
+      def create_children(self, generalStore: GeneralStore,
+                                element: StructureElement):
+                                
+          # The source resolver does not create children:
+          
+          return []
+       
+      def create_records(self, values: GeneralStore,
+                         records: RecordStore,
+                         element: StructureElement,
+                         file_path_prefix):
+          if not isinstance(element, TextElement):
+              raise RuntimeError()
+       
+          # This function must return a list containing tuples, each one for a modified
+          # property: (name_of_entity, name_of_property)
+          keys_modified = []
+       
+          # This is the name of the entity where the source is going to be attached:
+          attach_to_scientific_activity = self.definition["scientific_activity"]
+          rec = records[attach_to_scientific_activity]
+       
+          # The "source" is a path to a source project, so it should have the form:
+          # /<Category>/<project>/<scientific_activity>/
+          # obtain these information from the structure element:
+          val = element.value
+          regexp = (r'/(?P<category>(SimulationData)|(ExperimentalData)|(DataAnalysis))'
+                    '/(?P<project_date>.*?)_(?P<project_identifier>.*)'
+                    '/(?P<date>[0-9]{4,4}-[0-9]{2,2}-[0-9]{2,2})(_(?P<identifier>.*))?/')
+       
+          res = re.match(regexp, val)
+          if res is None:
+              raise RuntimeError("Source cannot be parsed correctly.")
+       
+          # Mapping of categories on the file system to corresponding record types in CaosDB:
+          cat_map = {
+              "SimulationData": "Simulation",
+              "ExperimentalData": "Experiment",
+              "DataAnalysis": "DataAnalysis"}
+          linkrt = cat_map[res.group("category")]
+       
+          keys_modified.extend(create_records(values, records, {
+              "Project": {
+                  "date": res.group("project_date"),
+                  "identifier": res.group("project_identifier"),
+              },
+              linkrt: {
+                  "date": res.group("date"),
+                  "identifier": res.group("identifier"),
+                  "project": "$Project"
+              },
+              attach_to_scientific_activity: {
+                  "sources": "+$" + linkrt
+              }}, file_path_prefix))
+       
+          # Process the records section of the yaml definition:
+          keys_modified.extend(
+              super().create_records(values, records, element, file_path_prefix))
+
+          # The create_records function must return the modified keys to make it compatible
+          # to the crawler functions:
+          return keys_modified
+
+
+If the recommended (python) package structure is used, the package containing the converter
+definition can just be installed using `pip install .` or `pip install -e .` from the
+`scifolder_package` directory.
+          
+The following yaml block will register the converter in a yaml file:
+
+.. code-block:: yaml
+
+   Converters:
+     SourceResolver:
+       package: scifolder.converters.sources
+       converter: SourceResolver
diff --git a/src/doc/index.rst b/src/doc/index.rst
index f11d73b58a3216b1d735d6565650148c150ebb68..724bcc543dd1cf0b9af451c487b1b3aab7fa95ca 100644
--- a/src/doc/index.rst
+++ b/src/doc/index.rst
@@ -1,8 +1,23 @@
 Crawler 2.0 Documentation
 =========================
 
-Introduction
-------------
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Contents:
+   :hidden:
+
+   Getting started<README_SETUP>
+   Concepts<concepts>
+   Converters<converters>
+   CFoods (Crawler Definitions)<cfood>
+   Macros<macros>
+   Tutorials<tutorials/index>
+   API documentation<_apidoc/modules>
+
+      
+
+This is the documentation for the crawler (previously known as crawler 2.0) for CaosDB, ``caosdb-crawler``.
 
 The crawler is the main date integration tool for CaosDB.
 Its task is to automatically synchronize data found on file systems or in other
@@ -15,172 +30,15 @@ The hierarchical sturcture can be for example a file tree. However it can be
 also something different like the contents of a json file or a file tree with
 json files.
 
-Concepts
---------
-
-Structure Elements
-++++++++++++++++++
-
-This hierarchical structure is assumed to be consituted of a tree of
-StructureElements. The tree is created on the fly by so called Converters which
-are defined in a yaml file. The tree of StructureElements is a model
-of the existing data (For example could a tree of Python file objects
-(StructureElements) represent a file tree that exists on some file server).
-
-Relevant sources in:
-src/structure_elements.py
-
-Converters
-++++++++++
-
-Converters treat StructureElements and thereby create the StructureElement that
-are the children of the treated StructureElement. Converters therefore create
-the above named tree. The definition of a Converter also contains what
-Converters shall be used to treat the generated child-StructureElements. The
-definition is there a tree itself. (Question: Should there be global Converters
-that are always checked when treating a StructureElement? Should Converters be
-associated with generated child-StructureElements? Currently, all children are
-created and checked against all Converters. It could be that one would like to
-check file-StructureElements against one set of Converters and
-directory-StructureElements against another)
-
-Each StructureElement in the tree has a set of data values, i.e a dictionary of
-key value pairs.
-Some of those values are set due to the kind of StructureElement. For example,
-a file could have the file name as such a key value pair: 'filename': <sth>.
-Converters may define additional functions that create further values. For
-example, a regular expresion could be used to get a date from a file name.
-
-
-
-
-A converter is defined via a yml file or part of it. The definition states
-what kind of StructureElement it treats (typically one).
-Also, it defines how children of the current StructureElement are
-created and what Converters shall be used to treat those.
-
-The yaml definition looks like the following:
-
-TODO: outdated, see cfood-schema.yml
-
-converter-name:
-    type: <StructureElement Type>
-    match: ".*"
-    records:
-        Experiment1:
-            parents:
-            - Experiment
-            - Blablabla
-            date: $DATUM
-            <...>
-        Experiment2:
-            parents:
-            - Experiment
-    valuegenerators:
-        datepattern:
-        <...>
-    childrengenerators:
-        create_children_from_directory:
-            sort-by-date: true
-    subtree:
-
-
-records:
-    Measurement: <- wird automatisch ein value im valueStore
-        run_number: 25
-    Experiment1:
-        Measurement: +Measurement <- Element in List (list is cleared before run)
-                     *Measurement <- Multi Property (properties are removed before run)
-                     Measurement  <- Overwrite
-
-UPDATE-Stage prüft ob es z.B. Gleichheit zwischen Listen gibt (die dadurch definiert sein
-kann, dass alle Elemente vorhanden, aber nicht zwingend in der richtigen Reihenfolge sind)
-evtl. brauchen wir das nicht, weil crawler eh schon deterministisch ist.
-
-The converter-name is a description of what it represents (e.g.
-'experiment-folder') and is used as identifier.
-
-The type restricts what kind of StructureElements are treated.
-The match is by default a regular expression, that is matche against the
-name of StructureElements. Discussion: StructureElements might not have a
-name (e.g. a dict) or should a name be created artificially if necessary
-(e.g. "root-dict")? It might make sense to allow keywords like "always" and
-other kinds of checks. For example a dictionary could be checked against a
-json-schema definition.
-
-recordtypes is a list of definitions that define the semantic structure
-(see details below).
-
-valuegenerators allow to provide additional functionality that creates
-data values in addition to the ones given by default via the
-StructureElement. This can be for example a match group of a regular
-expression applied to the filename.
-It should be possible to access the values of parent nodes. For example,
-the name of a parent node could be accessed with $converter-name.name.
-Discussion: This can introduce conflicts, if the key <converver-name>
-already exists. An alternative would be to identify those lookups. E.g.
-$$converter-name.name (2x$).
-
-childrengenerators denotes how StructureElements shall be created that are
-children of the current one.
-
-subtree contains a list of Converter defnitions that look like the one
-described here.
-
-those keywords should be allowed but not required. I.e. if no
-valuegenerators shall be defined, the keyword may be omitted.
-
-
-Relevant sources in:
-src/converters.py
-
-Identifiables
-+++++++++++++
-
-Relevant sources in:
-src/identifiable_adapters.py
-
-The Crawler
-+++++++++++
-
-The crawler can be considered the main program doing the synchronization in basically two steps:
-1. Based on a yaml-specification scan the file system (or other sources) and create a set
-   of CaosDB Entities that are supposed to be inserted or updated in a CaosDB instance.
-2. Compare the current state of the CaosDB instance with the set of CaosDB Entities created in
-   step 1, taking into account the :ref:`registered identifiables<Identifiables>`. Insert or
-   update entites accordingly.
-
-Relevant sources in:
-src/crawl.py
-
-
-
-Special Cases
-=============
-
-Variable Precedence
-++++++++++++
-
-Let's assume the following situation
-
-.. code-block:: yaml
-  description:
-    type: DictTextElement
-    match_value: (?P<description>.*)
-    match_name: description
+This documentation helps you to :doc:`get started<README_SETUP>`, explains the most important
+:doc:`concepts<concepts>` and offers a range of :doc:`tutorials<tutorials/index>`.
 
 
-Making use of the $description variable could refer to two different variables created here:
-1. The structure element path.
-2. The value of the matched expression.
+Indices and tables
+==================
 
-The matched expression does take precedence over the structure element path and shadows it.
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
 
-Make sure, that if you want to be able to use the structure element path, to give unique names
-to the variables like:
 
-.. code-block:: yaml
-  description_text_block:
-    type: DictTextElement
-    match_value: (?P<description>.*)
-    match_name: description
diff --git a/src/doc/macros.rst b/src/doc/macros.rst
new file mode 100644
index 0000000000000000000000000000000000000000..3d995c1fbc67b155a6df606ac2f84a0cec26d1a5
--- /dev/null
+++ b/src/doc/macros.rst
@@ -0,0 +1,88 @@
+Macros
+------
+
+Macros highly facilitate the writing of complex :doc:`CFoods<cfood>`. Consider the following prevalent example:
+
+.. _example_files:
+.. code-block:: yaml
+
+  ExperimentalData:
+    type: Directory
+    match: ExperimentalData
+    subtree:
+      README:
+        type: SimpleFile
+        match: ^README.md$
+        records:
+          ReadmeFile:
+            parents:
+            - MarkdownFile
+            role: File
+            path: $README
+            file: $README
+
+This example just inserts a file called ``README.md`` contained in the folder ``ExperimentalData/`` into CaosDB, assigns the parent (RecordType) ``MarkdownFile`` and allows for later referencing this entity within the cfood. As file objects are created in the cfood specification using the ``records`` section with the special role ``File``, defining and using many files can become very cumbersome and make the cfood file difficult to read.
+
+The same version using cfood macros could be defined as follows:
+            
+.. _example_files_2:
+.. code-block:: yaml
+
+  ---
+  metadata:
+    macros:
+    - !defmacro
+      name: MarkdownFile
+      params:
+        name: null
+        filename: null
+      definition:
+        ${name}_filename:
+          type: SimpleFile
+          match: $filename
+          records:
+            $name:
+              parents:
+              - MarkdownFile
+              role: File
+              path: ${name}_filename
+              file: ${name}_filename
+  ---
+  ExperimentalData:
+    type: Directory
+    match: ExperimentalData
+    subtree: !macro
+      MarkdownFile:
+      - name: README
+        filename: ^README.md$
+
+
+
+
+
+Complex Example
+===============
+
+.. _example_1:
+.. code-block:: yaml
+   
+     macros:
+     - !defmacro
+       name: SimulationDatasetFile
+       params:
+         match: null
+         recordtype: null
+         nodename: null
+       definition:
+         $nodename:
+           match: $match
+           type: SimpleFile
+           records:
+             File:
+               parents:
+               - $recordtype
+               role: File
+               path: $$$nodename
+               file: $$$nodename
+             Simulation:
+               $recordtype: +$File
diff --git a/src/doc/tutorials/index.rst b/src/doc/tutorials/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..1652515968c3b0025a2916604632d57c042f119b
--- /dev/null
+++ b/src/doc/tutorials/index.rst
@@ -0,0 +1,2 @@
+Tutorials
++++++++++
diff --git a/tox.ini b/tox.ini
index 2cf966fb5b80e62cb7f216b0785ba567e13ee3ff..101904b7de43fba6f04cf65641f555d79b0b080a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,11 +1,15 @@
 [tox]
-envlist=py36, py37, py38, py39, py310
+envlist=py38, py39, py310
 skip_missing_interpreters = true
 
 [testenv]
 deps = .
     pytest
     pytest-cov
-commands=py.test --cov=caosdb -vv {posargs}
+    # TODO: Make this f-branch sensitive
+    git+https://gitlab.indiscale.com/caosdb/src/caosdb-pylib.git@dev
+    git+https://gitlab.indiscale.com/caosdb/src/caosdb-advanced-user-tools.git@dev
+commands= caosdb-crawler --help
+    py.test --cov=caosdb -vv {posargs}
 [flake8]
 max-line-length=100
diff --git a/unittests/cfoods_scalar.yml b/unittests/cfoods_scalar.yml
new file mode 100644
index 0000000000000000000000000000000000000000..d0a728c35c27e331114cc5c18ebcfd1aa0905e31
--- /dev/null
+++ b/unittests/cfoods_scalar.yml
@@ -0,0 +1,14 @@
+# This is a test cfood for:
+# https://gitlab.com/caosdb/caosdb-crawler/-/issues/9
+
+Data:  # name of the converter
+  type: Directory
+  match: (.*)
+  subtree:
+    DataAnalysis:  # name of the converter
+      type: Directory
+      match: DataAnalysis
+      records:
+        RecordThatGetsParentsLater:
+          someId: 23  # <- this scalar causes problems
+
diff --git a/unittests/scifolder_cfood.yml b/unittests/scifolder_cfood.yml
index 1fd7c98d57b35fa651e36bee2c529a46e3a96cde..90f193444bfda7296c46260236274da2378635cc 100644
--- a/unittests/scifolder_cfood.yml
+++ b/unittests/scifolder_cfood.yml
@@ -16,7 +16,7 @@ Data:  # name of the converter
       subtree: &template
         project_dir:  # name of the first subtree element which is a converter
           type: Directory
-          match: (?P<date>.*?)_(?P<identifier>.*)
+          match: ((?P<date>[0-9]{4,4})_)?(?P<identifier>.*)
           records:
             Project:  # this is an identifiable in this case
               parents:
diff --git a/unittests/scifolder_extended.yml b/unittests/scifolder_extended.yml
index 2a1416b778e96ba57fc216d9763572568703ab75..9bab612b9b37e8e295ee8fd02575de506a98d8fc 100644
--- a/unittests/scifolder_extended.yml
+++ b/unittests/scifolder_extended.yml
@@ -16,12 +16,12 @@ Data:  # name of the converter
       subtree: &template
         project_dir:  # name of the first subtree element which is a converter
           type: Directory
-          match: (?P<date>.*?)_(?P<identifier>.*)
+          match: ((?P<year>[0-9]{4,4})_)?(?P<identifier>.*)
           records:
             Project:  # this is an identifiable in this case
               parents:
               - Project  # not needed as the name is equivalent
-              date: $date
+              date: $year
               identifier: $identifier
       
           subtree:
diff --git a/unittests/scifolder_extended2.yml b/unittests/scifolder_extended2.yml
index f1dfc2d4635b6956930343685c7b17ca4f2f1679..969325e91da488011819c338708a33dcfc32c93e 100644
--- a/unittests/scifolder_extended2.yml
+++ b/unittests/scifolder_extended2.yml
@@ -6,95 +6,99 @@ Definitions:
   type: Definitions
   #include "description.yml"
 
-DataAnalysis:  # name of the converter
+Data:  # name of the converter
   type: Directory
-  match: DataAnalysis
-  subtree: &template
-    project_dir:  # name of the first subtree element which is a converter
+  match: (.*)
+  subtree:
+    DataAnalysis:  # name of the converter
       type: Directory
-      match: (?P<date>.*?)_(?P<identifier>.*)
-      records:
-        Project:  # this is an identifiable in this case
-          parents:
-          - Project  # not needed as the name is equivalent
-          date: $date
-          identifier: $identifier
-  
-      subtree:
-        measurement:  # new name for folders on the 3rd level
+      match: DataAnalysis
+      subtree: &template
+        project_dir:  # name of the first subtree element which is a converter
           type: Directory
-          match: (?P<date>[0-9]{4,4}-[0-9]{2,2}-[0-9]{2,2})(_(?P<identifier>.*))?
+          match: ((?P<year>[0-9]{4,4})_)?(?P<identifier>.*)
           records:
-            Measurement:
-              date: $date
+            Project:  # this is an identifiable in this case
+              parents:
+              - Project  # not needed as the name is equivalent
+              date: $year
               identifier: $identifier
-              project: $Project
+      
           subtree:
-            README:
-              type: MarkdownFile  # this is a subclass of converter File
-              # function signature: GeneralStore, StructureElement
-              # preprocessors: custom.caosdb.convert_values
-              match: ^README\.md$
-              # how to make match case insensitive?
-              records:  # this block is very verbose and intended to make sure that this
-                        # file is inserted correctly (and can be supplemented with properties
-                        # and / or parents), TODO: maybe there should be a shorthand
-                ReadmeFile:
-                  parents:
-                  - ProjectMarkdownReadme
-                  role: File
-                  path: $README
-                  file: $README  # this is automatically the relative path
-                                 # starting from the top level structure element
-                                 # of this element
+            measurement:  # new name for folders on the 3rd level
+              type: Directory
+              match: (?P<date>[0-9]{4,4}-[0-9]{2,2}-[0-9]{2,2})(_(?P<identifier>.*))?
+              records:
                 Measurement:
-                  ReadmeFile: $ReadmeFile
-                      
+                  date: $date
+                  identifier: $identifier
+                  project: $Project
               subtree:
-                description:
-                  type: DictTextElement
-                  match_value: (?P<description>.*)
-                  match_name: description
-                  records:
+                README:
+                  type: MarkdownFile  # this is a subclass of converter File
+                  # function signature: GeneralStore, StructureElement
+                  # preprocessors: custom.caosdb.convert_values
+                  match: ^README\.md$
+                  # how to make match case insensitive?
+                  records:  # this block is very verbose and intended to make sure that this
+                            # file is inserted correctly (and can be supplemented with properties
+                            # and / or parents), TODO: maybe there should be a shorthand
+                    ReadmeFile:
+                      parents: 
+                      - ProjectMarkdownReadme
+                      role: File
+                      path: $README
+                      file: $README  # this is automatically the relative path
+                                    # starting from the top level structure element
+                                    # of this element
                     Measurement:
-                      description: $description
-                responsible_single:
-                    type: DictTextElement
-                    match_name: responsible
-                    match_value: &person_regexp ((?P<first_name>.+) )?(?P<last_name>.+)
-                    records: &responsible_records
-                      Person: 
-                        first_name: $first_name
-                        last_name: $last_name
-                      Measurement:  # this uses the reference to the above defined record
-                        responsible: +$Person    # each record also implicitely creates a variable
-                                                 # with the same name. The "+" indicates, that
-                                                 # this will become a list entry in list property
-                                                 # "responsible" belonging to Measurement.
-
-                responsible_list:
-                  type: DictListElement
-                  match_name: responsible
+                      ReadmeFile: $ReadmeFile
+                          
                   subtree:
-                    Person:
-                      type: TextElement
-                      match: *person_regexp
-                      records: *responsible_records
+                    description:
+                      type: DictTextElement
+                      match_value: (?P<description>.*)
+                      match_name: description
+                      records:
+                        Measurement:
+                          description: $description
+                    responsible_single:
+                        type: DictTextElement
+                        match_name: responsible
+                        match_value: &person_regexp ((?P<first_name>.+) )?(?P<last_name>.+)
+                        records: &responsible_records
+                          Person: 
+                            first_name: $first_name
+                            last_name: $last_name
+                          Measurement:  # this uses the reference to the above defined record
+                            responsible: +$Person    # each record also implicitely creates a variable
+                                                    # with the same name. The "+" indicates, that
+                                                    # this will become a list entry in list property
+                                                    # "responsible" belonging to Measurement.
 
-                # sources_list:
-                #   type: DictListElement
-                #   match_name: sources
-                #   subtree:
-                #     Source:
-                #       type: TextElement
-                #       match: &path ... ???
+                    responsible_list:
+                      type: DictListElement
+                      match_name: responsible
+                      subtree:
+                        Person:
+                          type: TextElement
+                          match: *person_regexp
+                          records: *responsible_records
 
-ExperimentalData:  # name of the converter
-  type: Directory
-  match: ExperimentalData
-  subtree: *template
+                    # sources_list:
+                    #   type: DictListElement
+                    #   match_name: sources
+                    #   subtree:
+                    #     Source:
+                    #       type: TextElement
+                    #       match: &path ... ???
 
-SimulationData:  # name of the converter
-  type: Directory
-  match: SimulationData
-  subtree: *template
+    ExperimentalData:  # name of the converter
+      type: Directory
+      match: ExperimentalData
+      subtree: *template
+
+    SimulationData:  # name of the converter
+      type: Directory
+      match: SimulationData
+      subtree: *template
diff --git a/unittests/simulated_server_data.py b/unittests/simulated_server_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..dd0c6b4e8693d64c9d96cafc5db2f447613daa1b
--- /dev/null
+++ b/unittests/simulated_server_data.py
@@ -0,0 +1,24 @@
+
+import caosdb as db
+data_model = {"person": (db.RecordType(id=259, name="Person")
+                         .add_property(name="first_name")
+                         .add_property(name="last_name")),
+              "measurement": (db.RecordType(id=278, name="Measurement")
+                              .add_property(name="identifier")
+                              .add_property(name="date")
+                              .add_property(name="project")),
+              "project": (db.RecordType(id=250, name="Project")
+                          .add_property(name="date")
+                          .add_property(name="identifier")),
+              "first_name": db.Property(name="first_name", datatype=db.TEXT, id=261),
+              "responsible": db.Property(name="responsible", datatype="Person", id=249),
+              "last_name": db.Property(name="last_name", datatype=db.TEXT, id=262),
+              "identifier": db.Property(name="identifier", datatype=db.TEXT, id=248),
+              "date": db.Property(name="date", datatype=db.DATETIME, id=247),
+              }
+existing_data = {
+}
+
+full_data = {}
+full_data.update(data_model)
+full_data.update(existing_data)
diff --git a/unittests/test_converters.py b/unittests/test_converters.py
index 5f56486ba0f63fdd64d4e4dd80e6d6eaeed705d1..30c5972c4f006aaf9923dfc058c3b861d8b5123b 100644
--- a/unittests/test_converters.py
+++ b/unittests/test_converters.py
@@ -1,11 +1,10 @@
 #!/usr/bin/env python3
 # encoding: utf-8
 #
-# ** header v3.0
 # This file is a part of the CaosDB Project.
 #
-# Copyright (C) 2021 Indiscale GmbH <info@indiscale.com>
-# Copyright (C) 2021 Henrik tom Wörden <h.tomwoerden@indiscale.com>
+# Copyright (C) 2021,2022 Indiscale GmbH <info@indiscale.com>
+# Copyright (C) 2021,2022 Henrik tom Wörden <h.tomwoerden@indiscale.com>
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as
@@ -20,30 +19,29 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
 #
-# ** end header
-#
 
 """
 test the converters module
 """
+import importlib
+import os
+import pytest
+import yaml
 
-from caoscrawler.converters import Converter
+from caoscrawler.converters import (Converter, ConverterValidationError,
+                                    DictConverter, DirectoryConverter,
+                                    handle_value, MarkdownFileConverter,
+                                    JSONFileConverter)
+from caoscrawler.crawl import Crawler
 from caoscrawler.stores import GeneralStore
-from caoscrawler.converters import (ConverterValidationError,
-                                    MarkdownFileConverter, JSONFileConverter,
-                                    DictConverter)
-from caoscrawler.structure_elements import Directory
 from caoscrawler.structure_elements import (File, DictTextElement,
                                             DictListElement, DictElement,
                                             DictBooleanElement, DictDictElement,
-                                            DictIntegerElement, DictFloatElement)
+                                            DictIntegerElement,
+                                            DictFloatElement, Directory)
 
 from test_tool import rfp
 
-import pytest
-import os
-import importlib
-
 
 @pytest.fixture
 def converter_registry():
@@ -244,3 +242,146 @@ def test_json_converter(converter_registry):
         children = jsonconverter.create_children(None, broken_json)
 
     assert err.value.message.startswith("Couldn't validate")
+
+
+def test_variable_replacement():
+    values = GeneralStore()
+    values["a"] = 4
+    values["b"] = "68"
+
+    assert handle_value("b", values) == ("b", "single")
+    assert handle_value("+b", values) == ("b", "list")
+    assert handle_value("*b", values) == ("b", "multiproperty")
+    assert handle_value("$b", values) == ("68", "single")
+    assert handle_value("+$b", values) == ("68", "list")
+    assert handle_value("*$b", values) == ("68", "multiproperty")
+
+    assert handle_value({"value": "b",
+                         "collection_mode": "single"}, values) == ("b", "single")
+    assert handle_value({"value": "b",
+                         "collection_mode": "list"}, values) == ("b", "list")
+    assert handle_value({"value": "b",
+                         "collection_mode": "multiproperty"}, values) == ("b", "multiproperty")
+    assert handle_value({"value": "$b",
+                         "collection_mode": "single"}, values) == ("68", "single")
+    assert handle_value({"value": "$b",
+                         "collection_mode": "list"}, values) == ("68", "list")
+    assert handle_value({"value": "$b",
+                         "collection_mode": "multiproperty"}, values) == ("68", "multiproperty")
+
+    assert handle_value(["a", "b"], values) == (["a", "b"], "single")
+    assert handle_value(["$a", "$b"], values) == (["4", "68"], "single")
+
+
+def test_filter_children_of_directory(converter_registry):
+    """Verify that children (i.e., files) in a directory are filtered or sorted
+    correctly.
+
+    """
+    test_dir = Directory("examples_filter_children", rfp(
+        "test_directories", "examples_filter_children"))
+
+    dc = DirectoryConverter(
+        definition={
+            "match": "(.*)",
+            "filter": {
+                "expr": "test_(?P<date>[0-9]{4,4}-[0-9]{2,2}-[0-9]{2,2}).json",
+                "group": "date",
+                "rule": "only_max"
+            }
+        },
+        name="TestOnlyMaxDirectoryConverter",
+        converter_registry=converter_registry
+    )
+
+    m = dc.match(test_dir)
+    assert m is not None
+
+    # This should only contain the youngest json and the csv that doesn't match
+    # the above filter expression.
+    children = dc.create_children(None, test_dir)
+    assert len(children) == 2
+    assert children[0].__class__ == File
+    assert children[0].name == "test_2022-02-02.json"
+    assert children[1].__class__ == File
+    assert children[1].name == "some_other_file.csv"
+
+    dc = DirectoryConverter(
+        definition={
+            "match": "(.*)",
+            "filter": {
+                "expr": "test_(?P<date>[0-9]{4,4}-[0-9]{2,2}-[0-9]{2,2}).json",
+                "group": "date",
+                "rule": "only_min"
+            }
+        },
+        name="TestOnlyMinDirectoryConverter",
+        converter_registry=converter_registry
+    )
+
+    m = dc.match(test_dir)
+    assert m is not None
+
+    # This should only contain the youngest json and the csv that doesn't match
+    # the above filter expression.
+    children = dc.create_children(None, test_dir)
+    assert len(children) == 2
+    assert children[0].__class__ == File
+    assert children[0].name == "test_2022-01-01.json"
+    assert children[1].__class__ == File
+    assert children[1].name == "some_other_file.csv"
+
+    dc = DirectoryConverter(
+        definition={
+            "match": "(.*)",
+            "filter": {
+                "expr": "test_(?P<date>[0-9]{4,4}-[0-9]{2,2}-[0-9]{2,2}).json",
+                "group": "date",
+                "rule": "does_not_exist"
+            }
+        },
+        name="TestBrokenDirectoryConverter",
+        converter_registry=converter_registry
+    )
+
+    m = dc.match(test_dir)
+    assert m is not None
+
+    with pytest.raises(RuntimeError):
+        children = dc.create_children(None, test_dir)
+
+
+def test_validate_custom_converters():
+    one_doc_yaml = """
+Converters:
+  MyNewType:
+    converter: MyNewTypeConverter
+    package: some_package.my_converters
+MyElement:
+  type: MyNewType
+  match: something
+    """
+    crawler1 = Crawler()
+    one_doc_definitions = crawler1._load_definition_from_yaml_dict(
+        [yaml.load(one_doc_yaml, Loader=yaml.SafeLoader)])
+    assert "MyElement" in one_doc_definitions
+    assert one_doc_definitions["MyElement"]["type"] == "MyNewType"
+
+    # this has to be equivalent
+    two_doc_yaml = """
+---
+metadata:
+  Converters:
+    MyNewType:
+      converter: MyNewTypeConverter
+      package: some_package.my_converters
+---
+MyElement:
+  type: MyNewType
+  match: something
+    """
+    crawler2 = Crawler()
+    two_doc_definitions = crawler2._load_definition_from_yaml_dict(
+        list(yaml.safe_load_all(two_doc_yaml)))
+    assert "MyElement" in two_doc_definitions
+    assert two_doc_definitions["MyElement"]["type"] == one_doc_definitions["MyElement"]["type"]
diff --git a/unittests/test_directories/example_substitutions/substitutions_parents.yml b/unittests/test_directories/example_substitutions/substitutions_parents.yml
new file mode 100644
index 0000000000000000000000000000000000000000..107e766ccd833fab618cecfc04f13bc29abc80a6
--- /dev/null
+++ b/unittests/test_directories/example_substitutions/substitutions_parents.yml
@@ -0,0 +1,25 @@
+
+ExperimentalData:  # name of the converter
+  type: Directory
+  match: ExperimentalData
+  records:
+    Project:
+      name: project
+  subtree:
+    File:  # name of the converter
+      type: SimpleFile
+      match: (?P<year>[0-9]{2,2})(?P<month>[0-9]{2,2})(?P<day>[0-9]{2,2})_data.dat
+      records:
+        Experiment:
+          parents:
+          - Experiment
+          - Month_$month  # This adds a special parent as record type
+          date: 20$year-$month-$day
+
+        ExperimentSeries:
+          Experiment: $Experiment
+
+        Project:
+          Experiments: +$Experiment
+          dates: +20$year-$month-$day
+      
diff --git a/unittests/test_directories/examples_filter_children/some_other_file.csv b/unittests/test_directories/examples_filter_children/some_other_file.csv
new file mode 100644
index 0000000000000000000000000000000000000000..bc715fe81656397eae98aa4b04f9af2e3fdd9e43
--- /dev/null
+++ b/unittests/test_directories/examples_filter_children/some_other_file.csv
@@ -0,0 +1,2 @@
+some,other,data
+1,2,3
diff --git a/unittests/test_directories/examples_filter_children/test_2022-01-01.json b/unittests/test_directories/examples_filter_children/test_2022-01-01.json
new file mode 100644
index 0000000000000000000000000000000000000000..8de42f29d2eed374a0aba356c7fce2daa3e08e49
--- /dev/null
+++ b/unittests/test_directories/examples_filter_children/test_2022-01-01.json
@@ -0,0 +1,3 @@
+{
+    "key": "value"
+}
diff --git a/unittests/test_directories/examples_filter_children/test_2022-01-02.json b/unittests/test_directories/examples_filter_children/test_2022-01-02.json
new file mode 100644
index 0000000000000000000000000000000000000000..8de42f29d2eed374a0aba356c7fce2daa3e08e49
--- /dev/null
+++ b/unittests/test_directories/examples_filter_children/test_2022-01-02.json
@@ -0,0 +1,3 @@
+{
+    "key": "value",
+}
diff --git a/unittests/test_directories/examples_filter_children/test_2022-02-02.json b/unittests/test_directories/examples_filter_children/test_2022-02-02.json
new file mode 100644
index 0000000000000000000000000000000000000000..8de42f29d2eed374a0aba356c7fce2daa3e08e49
--- /dev/null
+++ b/unittests/test_directories/examples_filter_children/test_2022-02-02.json
@@ -0,0 +1,3 @@
+{
+    "key": "value",
+}
diff --git a/unittests/test_directories/examples_json/testjson.json b/unittests/test_directories/examples_json/testjson.json
index b893b608a6a2119c5c3252cd9cff4c4100f404da..d37ea2defc21d767e4e13ad3b39d6682b3c452ef 100644
--- a/unittests/test_directories/examples_json/testjson.json
+++ b/unittests/test_directories/examples_json/testjson.json
@@ -6,6 +6,7 @@
         {
 		    "firstname": "Miri",
 		    "lastname": "Mueller",
+		    "other": null,
 		    "email": "miri.mueller@science.de"
 	    },
         {
diff --git a/unittests/test_directories/examples_tables/ExperimentalData/test1.csv b/unittests/test_directories/examples_tables/ExperimentalData/test1.csv
new file mode 100644
index 0000000000000000000000000000000000000000..c2eb297b523c06729937a07221c695105df0b09c
--- /dev/null
+++ b/unittests/test_directories/examples_tables/ExperimentalData/test1.csv
@@ -0,0 +1,8 @@
+Col_1,Col_2,Col_3,text
+Index,description,,
+,m,s,
+0,12,1,jdsfkljadskf
+1,14,3,jdkfljad
+2,3,4,jadkfjdsk
+3,4.5,6,
+4,8,7,jadskfj
diff --git a/unittests/test_directories/examples_tables/ExperimentalData/test1.xlsx b/unittests/test_directories/examples_tables/ExperimentalData/test1.xlsx
new file mode 100644
index 0000000000000000000000000000000000000000..2bf68c8a854ae7f618e47e1db58490fc76c055b2
Binary files /dev/null and b/unittests/test_directories/examples_tables/ExperimentalData/test1.xlsx differ
diff --git a/unittests/test_directories/examples_tables/crawler_for_tables.yml b/unittests/test_directories/examples_tables/crawler_for_tables.yml
new file mode 100644
index 0000000000000000000000000000000000000000..7aaea3e55eb4b8cb2329c24c8b7861f0d9e76d69
--- /dev/null
+++ b/unittests/test_directories/examples_tables/crawler_for_tables.yml
@@ -0,0 +1,59 @@
+
+ExperimentalData:
+  type: Directory
+  match: ExperimentalData
+  records:
+    Project:
+      name: project
+  subtree:
+    XLSXTable:
+      type: XLSXTableConverter
+      match: test1\.xlsx
+      skiprows: [1, 2]
+      header: 0
+      records:
+        Experiment: {}
+
+      subtree:
+        Row:
+          type: DictDictElement
+          match_name: .*
+          records:
+            Measurement: {}
+            Experiment:
+              Measurements: +$Measurement
+          subtree:
+            Col_1:
+              type: DictIntegerElement
+              match_name: Col_1
+              match_value: (?P<Value>[0-9]+)
+              records:
+                Measurement:
+                  Col_1: $Value
+    CSVTable:
+      type: CSVTableConverter
+      match: test1\.csv
+      skiprows: [1, 2]
+      header: 0
+      records:
+        Experiment: {}
+
+      subtree:
+        Row:
+          type: DictDictElement
+          match_name: .*
+          records:
+            Measurement: {}
+            Experiment:
+              Measurements: +$Measurement
+          subtree:
+            Col_1:
+              type: DictIntegerElement
+              match_name: Col_1
+              match_value: (?P<Value>[0-9]+)
+              records:
+                Measurement:
+                  Col_1: $Value
+              
+
+              
diff --git a/unittests/test_issues.py b/unittests/test_issues.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e77b0c7f26f4b2970203cfc4b8cc786fe24121b
--- /dev/null
+++ b/unittests/test_issues.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2022 IndiScale GmbH <info@indiscale.com>
+# Copyright (C) 2022 Florian Spreckelsen <f.spreckelsen@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+
+from pytest import mark
+
+from caoscrawler.crawl import Crawler
+from caoscrawler.structure_elements import Dict
+from test_tool import rfp
+
+
+@mark.xfail(
+    reason="Wait until value conversion in dicts is fixed, see "
+    "https://gitlab.com/caosdb/caosdb-crawler/-/issues/10."
+)
+def test_issue_10():
+    """Test integer-to-float conversion in dictionaries"""
+    crawler_definition = {
+        "DictTest": {
+            "type": "Dict",
+            "match": "(.*)",
+            "records": {
+                "TestRec": {}
+            },
+            "subtree": {
+                "float_element": {
+                    "type": "DictFloatElement",
+                    "match_name": "float_value",
+                    "match_value": "(?P<float_value>.*)",
+                    "records": {
+                        "TestRec": {
+                            "float_prop": "$float_value"
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    crawler = Crawler(debug=True)
+    converter_registry = crawler.load_converters(crawler_definition)
+
+    test_dict = {
+        "float_value": 4
+    }
+
+    records = crawler.start_crawling(
+        Dict("TestDict", test_dict), crawler_definition, converter_registry)
+    assert len(records) == 1
+    assert records[0].parents[0].name == "TestRec"
+    assert records[0].get_property("float_prop") is not None
+    assert float(records[0].get_property("float_prop").value) == 4.0
diff --git a/unittests/test_macros.py b/unittests/test_macros.py
new file mode 100644
index 0000000000000000000000000000000000000000..7ac34cc7c48df3cb2855d7022119e4775d90c9a6
--- /dev/null
+++ b/unittests/test_macros.py
@@ -0,0 +1,330 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# ** header v3.0
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2022 Alexander Schlemmer
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ** end header
+#
+
+from caoscrawler.macros import defmacro_constructor, macro_constructor
+from caoscrawler.macros.macro_yaml_object import macro_store
+from caoscrawler.crawl import Crawler
+
+from tempfile import NamedTemporaryFile
+
+import yaml
+import pytest
+
+
+@pytest.fixture
+def register_macros():
+    yaml.SafeLoader.add_constructor("!defmacro", defmacro_constructor)
+    yaml.SafeLoader.add_constructor("!macro", macro_constructor)
+
+
+@pytest.fixture
+def macro_store_reset():
+    macro_store.clear()
+
+
+def _temp_file_load(txt: str):
+    """
+    Create a temporary file with txt and load the crawler
+    definition using load_definition from Crawler.
+    """
+    definition = None
+    with NamedTemporaryFile() as f:
+        f.write(txt.encode())
+        f.flush()
+        c = Crawler()
+        definition = c.load_definition(f.name)
+    return definition
+
+
+def test_macros(register_macros, macro_store_reset):
+    dat = yaml.load("""
+defs:
+- !defmacro
+  name: test
+  params:
+    a: 2
+    b: bla
+    c: $variable
+  definition:
+    expanded_$b:
+      blubb: ok$a
+      $b: $c
+
+testnode:
+  obl: !macro
+    test:
+      a: 4
+      b: yea
+""", Loader=yaml.SafeLoader)
+    assert dat["testnode"]["obl"]["expanded_yea"]["blubb"] == "ok4"
+    assert dat["testnode"]["obl"]["expanded_yea"]["yea"] == "$variable"
+    assert "expanded_bla" not in dat["testnode"]["obl"]
+    assert "bla" not in dat["testnode"]["obl"]["expanded_yea"]
+
+
+def test_macro_list_replacement(register_macros, macro_store_reset):
+    dat = yaml.load("""
+defs:
+- !defmacro
+  name: test
+  params:
+    a: 2
+    b: bla
+    c: $variable
+  definition:
+    expanded_$b:
+      blubb:
+      - ok$a
+      - $b: $c
+
+testnode:
+  obl: !macro
+    test:
+      a: 4
+      b: yea
+""", Loader=yaml.SafeLoader)
+    assert isinstance(dat["testnode"]["obl"]["expanded_yea"]["blubb"], list)
+    assert len(dat["testnode"]["obl"]["expanded_yea"]["blubb"]) == 2
+    assert dat["testnode"]["obl"]["expanded_yea"]["blubb"][0] == "ok4"
+    assert dat["testnode"]["obl"]["expanded_yea"]["blubb"][1]["yea"] == "$variable"
+
+
+def test_multi_macros(register_macros, macro_store_reset):
+    dat = yaml.load("""
+defs:
+- !defmacro
+  name: test_one
+  params: {}
+  definition:
+    replaced1: ok
+- !defmacro
+  name: test_two
+  params: {}
+  definition:
+    replaced2: ok
+    replaced3: ok
+
+testnode:
+    obl: !macro
+      test_one:
+      test_two:
+""", Loader=yaml.SafeLoader)
+    assert dat["testnode"]["obl"]["replaced1"] == "ok"
+    assert dat["testnode"]["obl"]["replaced2"] == "ok"
+    assert dat["testnode"]["obl"]["replaced3"] == "ok"
+
+
+def test_multi_macros_toplevel(register_macros, macro_store_reset):
+    """
+    See: https://gitlab.indiscale.com/caosdb/src/caosdb-crawler/-/issues/23
+    """
+    dat_loader = list(yaml.safe_load_all("""
+---
+metadata:
+  macros:
+    - !defmacro
+      name: test_one
+      params: {}
+      definition:
+        replaced1: ok
+    - !defmacro
+      name: test_two
+      params: {}
+      definition:
+        replaced2: ok
+        replaced3: ok
+---
+testnode: !macro
+  test_one:
+  test_two:
+"""))
+    assert len(dat_loader) == 2
+    dat = dat_loader[1]
+    assert dat["testnode"]["replaced1"] == "ok"
+    assert dat["testnode"]["replaced2"] == "ok"
+    assert dat["testnode"]["replaced3"] == "ok"
+
+
+def test_load_definition(register_macros, macro_store_reset):
+    txt = """
+extroot:
+  type: Directory
+  match: extroot
+  subtree:
+    SimulationData:
+      type: Directory
+      match: SimulationData
+  """
+    # Check whether simple cfoods can be loaded:
+    cfood = _temp_file_load(txt)
+    assert cfood["extroot"]["subtree"]["SimulationData"]["match"] == "SimulationData"
+
+    cfood = _temp_file_load("""
+---
+metadata:
+  macros:
+    - !defmacro
+      name: test_one
+      params: {}
+      definition:
+        replaced1: ok
+    - !defmacro
+      name: test_two
+      params:
+        match_name: null
+      definition:
+        type: Directory
+        match: $match_name
+---
+extroot:
+  type: Directory
+  match: extroot
+  subtree:
+    SimulationData:
+      type: Directory
+      match: SimulationData
+extroot2: !macro  # test top level macro
+  test_one:
+extroot3:
+  subtree:
+    SimulationData: !macro
+      test_two:
+        match_name: SimulationData
+    """)
+    assert cfood["extroot"]["subtree"]["SimulationData"]["match"] == "SimulationData"
+    assert cfood["extroot2"]["replaced1"] == "ok"
+    assert cfood["extroot3"]["subtree"]["SimulationData"]["match"] == "SimulationData"
+
+
+@pytest.mark.xfail
+def test_replace_arbitrary_objects(register_macros, macro_store_reset):
+    """
+    See: https://gitlab.indiscale.com/caosdb/src/caosdb-crawler/-/issues/24
+    """
+    dat = yaml.load("""
+defs:
+- !defmacro
+  name: test
+  params:
+    b: 25
+    testvar_list:
+    - a
+    - $b
+    testvar_dict:
+      t1: a
+      t2: $b
+  definition:
+    replaced1:
+      $b: ok
+      c: $testvar_dict
+      d: $testvar_list
+
+testnode:
+  obl: !macro
+    test:
+""", Loader=yaml.SafeLoader)
+    print(yaml.dump(dat))
+    assert dat["testnode"]["obl"]["replaced1"]["c"]["t1"] == "a"
+    assert dat["testnode"]["obl"]["replaced1"]["c"]["t2"] == "25"
+    assert dat["testnode"]["obl"]["replaced1"]["d"][0] == "a"
+    assert dat["testnode"]["obl"]["replaced1"]["d"][1] == "25"
+
+
+def test_circular_macro_definition(register_macros, macro_store_reset):
+    """Test the (ab-)use of macros to create an infinite loop."""
+    cfood = _temp_file_load("""
+---
+metadata:
+  macros:
+    - !defmacro
+      name: test_one
+      params: {}
+      definition: !macro
+        test_two:
+    - !defmacro
+      name: test_two
+      params: {}
+      definition: !macro
+        test_one:
+    - !defmacro
+      name: test_three
+      params: {}
+      definition: !macro
+        test_two:
+    - !defmacro
+      name: test_four
+      params: {}
+      definition: !macro
+        test_four:
+---
+extroot: !macro
+  test_one:
+extroot2: !macro
+  test_three:
+extroot3: !macro
+  test_four:
+    """)
+    # macros in macros can be used, but there are no circles; they stop at the first one.
+    assert "test_one" not in cfood["extroot"]
+    assert cfood["extroot"]["test_two"] is None
+    assert "test_three" not in cfood["extroot2"]
+    assert "test_one" not in cfood["extroot2"]
+    assert cfood["extroot2"]["test_two"] is None
+    # No recursion
+    assert cfood["extroot3"]["test_four"] is None
+
+
+# @pytest.mark.xfail(reason="Fix multiple usage of the same macro.")
+def test_use_macro_twice():
+    """Test that the same macro can be used twice with different parameters in
+    the same CFood element if the name depends on the parameters.
+
+    """
+
+    cfood = _temp_file_load("""
+---
+metadata:
+  macros:
+    - !defmacro
+      name: test_twice
+      params:
+        macro_name: default_name
+        a: 4
+      definition:
+        $macro_name:
+          something:
+            a: $a
+---
+extroot: !macro
+  test_twice:
+  - macro_name: once
+  - macro_name: twice
+    a: 5
+  - {}
+    """)
+    for name in ["once", "twice", "default_name"]:
+        assert name in cfood["extroot"]
+    assert cfood["extroot"]["once"]["something"]["a"] == "4"
+    assert cfood["extroot"]["twice"]["something"]["a"] == "5"
+    assert cfood["extroot"]["default_name"]["something"]["a"] == "4"
diff --git a/unittests/test_scalars_cfood.py b/unittests/test_scalars_cfood.py
new file mode 100644
index 0000000000000000000000000000000000000000..1bf8f0b7d67f00f2018b5b68424d6b9cc17602eb
--- /dev/null
+++ b/unittests/test_scalars_cfood.py
@@ -0,0 +1,57 @@
+#!/bin/python
+# Tests for:
+# https://gitlab.com/caosdb/caosdb-crawler/-/issues/9
+# A. Schlemmer, 06/2021
+
+import pytest
+
+# The main function that is affected by this issue:
+from caoscrawler.converters import handle_value
+from caoscrawler.crawl import Crawler
+# We need the store for the above function
+from caoscrawler.stores import GeneralStore
+
+from test_tool import dircheckstr, rfp
+
+
+@pytest.fixture
+def crawler():
+    crawler = Crawler(debug=True)
+    crawler.crawl_directory(rfp("test_directories", "examples_article"),
+                            rfp("cfoods_scalar.yml"))
+    return crawler
+
+
+def test_handle_value():
+    # Note that we will need this store only, if we also want to test variables substitution:
+    store = GeneralStore()
+
+    # This one should work:
+    assert handle_value("bla", store) == ("bla", "single")
+
+    # These failed:
+    assert handle_value(4, store) == (4, "single")
+    assert handle_value(4.2, store) == (4.2, "single")
+    assert handle_value(True, store) == (True, "single")
+
+    # List test:
+    assert handle_value([4, 3, 2], store) == ([4, 3, 2], "single")
+
+
+def test_record_structure_generation(crawler):
+    subd = crawler.debug_tree[dircheckstr("DataAnalysis")]
+    assert len(subd) == 2
+    # variables store on Data Analysis node of debug tree
+    assert len(subd[0]) == 3
+    assert "Data" in subd[0]
+    assert "DataAnalysis" in subd[0]
+    assert "RecordThatGetsParentsLater" in subd[0]
+
+    prop = subd[0]["RecordThatGetsParentsLater"].get_property("someId")
+    assert type(prop.value) == int
+    assert prop.value == 23
+
+    # record store on Data Analysis node of debug tree
+    assert len(subd[1]) == 1
+    prop2 = subd[1]["RecordThatGetsParentsLater"].get_property("someId")
+    assert prop == prop2
diff --git a/unittests/test_table_converter.py b/unittests/test_table_converter.py
new file mode 100644
index 0000000000000000000000000000000000000000..85255d3efd34dc666d5d2e97423f33177dea6732
--- /dev/null
+++ b/unittests/test_table_converter.py
@@ -0,0 +1,166 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# ** header v3.0
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2022 Alexander Schlemmer <alexander.schlemmer@ds.mpg.de>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ** end header
+#
+
+"""
+test the converters module
+"""
+
+from caoscrawler.converters import Converter
+from caoscrawler.stores import GeneralStore
+from caoscrawler.converters import (ConverterValidationError,
+                                    DictConverter, XLSXTableConverter, CSVTableConverter)
+from caoscrawler.structure_elements import Directory
+from caoscrawler.structure_elements import (File, DictTextElement,
+                                            DictListElement, DictElement,
+                                            DictBooleanElement, DictDictElement,
+                                            DictIntegerElement, DictFloatElement)
+
+from os.path import join, dirname, basename
+
+from caoscrawler.identifiable_adapters import IdentifiableAdapter, LocalStorageIdentifiableAdapter
+
+import pytest
+import os
+import importlib
+
+import math
+
+from caoscrawler import Crawler
+
+import caosdb as db
+
+
+@pytest.fixture
+def converter_registry():
+    converter_registry: dict[str, dict[str, str]] = {
+        "Directory": {
+            "converter": "DirectoryConverter",
+            "package": "caoscrawler.converters"},
+        "CSVTableConverter": {
+            "converter": "CSVTableConverter",
+            "package": "caoscrawler.converters"},
+        "XLSXTableConverter": {
+            "converter": "XLSXTableConverter",
+            "package": "caoscrawler.converters"},
+
+        "DictDictElement": {
+            "converter": "DictDictElementConverter",
+            "package": "caoscrawler.converters"},
+        "DictTextElement": {
+            "converter": "DictTextElementConverter",
+            "package": "caoscrawler.converters"},
+        "DictIntegerElement": {
+            "converter": "DictIntegerElementConverter",
+            "package": "caoscrawler.converters"},
+        "DictFloatElement": {
+            "converter": "DictFloatElementConverter",
+            "package": "caoscrawler.converters"},
+    }
+    return converter_registry
+
+def rfp(*pathcomponents):
+    """
+    Return full path.
+    Shorthand convenience function.
+    """
+    return join(dirname(__file__), *pathcomponents)
+
+
+def dircheckstr(*pathcomponents):
+    """
+    Return the debug tree identifier for a given path.
+    """
+    return "caoscrawler.structure_elements.File: " + basename(join(*pathcomponents)) + ", " + rfp("test_directories", "examples_tables", "ExperimentalData", *pathcomponents)
+
+
+@pytest.fixture
+def crawler():
+    crawler = Crawler(debug=True)
+    crawler.crawl_directory(rfp("test_directories", "examples_tables", "ExperimentalData"),
+                            rfp("test_directories", "examples_tables", "crawler_for_tables.yml"))
+    return crawler
+
+
+def test_convert_table(converter_registry):
+    extensions = ["xlsx", "csv", "tsv"]
+    if importlib.util.find_spec("odf") is not None:
+        extensions.append("ods")
+    for file_ext in extensions:
+        def_opt = {"skiprows": ["1", "2"], "header": 0}
+        if file_ext == "tsv":
+            def_opt["sep"] = "\t"
+        if file_ext in ["csv", "tsv"]:
+            converter = CSVTableConverter(
+                def_opt,
+                "Tab",
+                converter_registry)
+        else:
+            converter = XLSXTableConverter(
+                def_opt,
+                "Tab",
+                converter_registry)
+        store = GeneralStore()
+        file_element = File("table." + file_ext,
+                            rfp("test_tables", "test1." + file_ext))
+        res = converter.create_children(store,
+                                        file_element)
+        assert len(res) == 5
+        for i in range(5):
+            assert res[i].name == str(i)
+            assert type(res[i].name) == str
+            assert type(res[i].value) == dict
+            assert len(res[i].value) == 4
+            assert type(res[i].value["Col_1"]) == int
+            assert res[i].value["Col_1"] == i
+            assert type(res[i].value["Col_2"]) == float
+            assert type(res[i].value["Col_3"]) == int
+            if i != 3:
+                assert type(res[i].value["text"]) == str
+            else:
+                assert type(res[i].value["text"]) == float  # the nan value
+                assert math.isnan(res[i].value["text"])
+
+    # Using an index col:
+    converter = XLSXTableConverter(
+        {"skiprows": ["1", "2"], "header": 0, "index_col": "3"},
+        "XLSXTable",
+        converter_registry)
+    store = GeneralStore()
+    file_element = File("table.xlsx",
+                        rfp("test_tables", "test1.xlsx"))
+    res = converter.create_children(store,
+                                    file_element)
+    assert res[0].name == "jdsfkljadskf"
+
+
+def test_crawl_csv_table(crawler):
+    for file_ext in ["xlsx", "csv"]:
+        subd = crawler.debug_tree[dircheckstr("test1." + file_ext)]
+        record_experiment = subd[1]["Experiment"]
+        assert isinstance(record_experiment, db.Record)
+        assert isinstance(record_experiment.get_property("Measurements").value, list)
+        assert len(record_experiment.get_property("Measurements").value) == 5
+        prop_measure = record_experiment.get_property("Measurements").value[2]
+        assert isinstance(prop_measure, db.Record)
+        assert prop_measure.get_property("Col_1").value == "2"
diff --git a/unittests/test_tables/test1.csv b/unittests/test_tables/test1.csv
new file mode 100644
index 0000000000000000000000000000000000000000..c2eb297b523c06729937a07221c695105df0b09c
--- /dev/null
+++ b/unittests/test_tables/test1.csv
@@ -0,0 +1,8 @@
+Col_1,Col_2,Col_3,text
+Index,description,,
+,m,s,
+0,12,1,jdsfkljadskf
+1,14,3,jdkfljad
+2,3,4,jadkfjdsk
+3,4.5,6,
+4,8,7,jadskfj
diff --git a/unittests/test_tables/test1.ods b/unittests/test_tables/test1.ods
new file mode 100644
index 0000000000000000000000000000000000000000..6d5138b496511b02d0e6104868b6ba1e6816bfb6
Binary files /dev/null and b/unittests/test_tables/test1.ods differ
diff --git a/unittests/test_tables/test1.tsv b/unittests/test_tables/test1.tsv
new file mode 100644
index 0000000000000000000000000000000000000000..69286fcecd82c955f900bcdf7e6b5adfe26ab8c8
--- /dev/null
+++ b/unittests/test_tables/test1.tsv
@@ -0,0 +1,8 @@
+Col_1	Col_2	Col_3	text
+Index	description		
+	m	s	
+0	12	1	jdsfkljadskf
+1	14	3	jdkfljad
+2	3	4	jadkfjdsk
+3	4.5	6	
+4	8	7	jadskfj
diff --git a/unittests/test_tables/test1.xlsx b/unittests/test_tables/test1.xlsx
new file mode 100644
index 0000000000000000000000000000000000000000..2bf68c8a854ae7f618e47e1db58490fc76c055b2
Binary files /dev/null and b/unittests/test_tables/test1.xlsx differ
diff --git a/unittests/test_tool.py b/unittests/test_tool.py
index 1e7f10069c49ce6cab71da5f469e28b69158b4b5..a190efdeaaa9b3ede8d6fc1b9d1fb2d6e0d9c210 100755
--- a/unittests/test_tool.py
+++ b/unittests/test_tool.py
@@ -3,11 +3,14 @@
 # Adapted from check-sfs
 # A. Schlemmer, 06/2021
 
-from caoscrawler import Crawler
+from caoscrawler.crawl import Crawler, SecurityMode
 from caoscrawler.structure_elements import File, DictTextElement, DictListElement
 from caoscrawler.identifiable_adapters import IdentifiableAdapter, LocalStorageIdentifiableAdapter
+from simulated_server_data import full_data
 from functools import partial
 from copy import deepcopy
+from unittest.mock import patch
+import caosdb.common.models as dbmodels
 from unittest.mock import MagicMock, Mock
 from os.path import join, dirname, basename
 import yaml
@@ -26,11 +29,19 @@ def rfp(*pathcomponents):
     return join(dirname(__file__), *pathcomponents)
 
 
+ident = LocalStorageIdentifiableAdapter()
+ident.restore_state(rfp("records.xml"))
+full_data.update({el.name: el for el in ident._records if el.name is not None})
+full_data.update({el.id: el for el in ident._records if el.name is None})
+
+
 def dircheckstr(*pathcomponents):
     """
     Return the debug tree identifier for a given path.
     """
-    return "caoscrawler.structure_elements.Directory: " + basename(join(*pathcomponents)) + ", " + rfp("test_directories", "examples_article", *pathcomponents)
+    return ("caoscrawler.structure_elements.Directory: " + basename(
+        join(*pathcomponents)) + ", " + rfp(
+            "test_directories", "examples_article", *pathcomponents))
 
 
 @pytest.fixture
@@ -84,7 +95,7 @@ def test_record_structure_generation(crawler):
 
     # The data analysis node creates one variable for the node itself:
     assert subd[0]["DataAnalysis"] == "examples_article/DataAnalysis"
-    assert subc[0]["DataAnalysis"] == False
+    assert subc[0]["DataAnalysis"] is False
 
     subd = crawler.debug_tree[dircheckstr(
         "DataAnalysis", "2020_climate-model-predict")]
@@ -104,9 +115,9 @@ def test_record_structure_generation(crawler):
     assert subd[0]["Project"].__class__ == db.Record
 
     assert subd[0]["DataAnalysis"] == "examples_article/DataAnalysis"
-    assert subc[0]["DataAnalysis"] == True
+    assert subc[0]["DataAnalysis"] is True
     assert subd[0]["project_dir"] == "examples_article/DataAnalysis/2020_climate-model-predict"
-    assert subc[0]["project_dir"] == False
+    assert subc[0]["project_dir"] is False
 
     # Check the copy flags for the first level in the hierarchy:
     assert len(subc[0]) == 6
@@ -173,7 +184,7 @@ def test_record_structure_generation(crawler):
 
 def test_ambigious_records(crawler, ident):
     ident.get_records().clear()
-    ident.get_records().extend(crawler.updateList)
+    ident.get_records().extend(crawler.target_data)
     r = ident.get_records()
     id_r0 = ident.get_identifiable(r[0])
     with raises(RuntimeError, match=".*unambigiously.*"):
@@ -195,10 +206,11 @@ def test_crawler_update_list(crawler, ident):
     ) == 2
 
     # The crawler contains lots of duplicates, because identifiables have not been resolved yet:
-    assert len(ident.get_records()) != len(crawler.updateList)
+    assert len(ident.get_records()) != len(crawler.target_data)
 
     # Check consistency:
-    # Check whether identifiables retrieved from current identifiable store return the same results.
+    # Check whether identifiables retrieved from current identifiable store return
+    # the same results.
 
     # take the first person in the list of records:
     for r in ident.get_records():
@@ -289,10 +301,10 @@ def test_remove_unnecessary_updates():
 
     # test property difference case
     # TODO this should work right?
-    #upl = [db.Record().add_parent("A").add_property("a", 3)]
+    # upl = [db.Record().add_parent("A").add_property("a", 3)]
     # irs = [db.Record().add_parent("A")]  # ID should be s
-    #Crawler.remove_unnecessary_updates(upl, irs)
-    #assert len(upl) == 1
+    # Crawler.remove_unnecessary_updates(upl, irs)
+    # assert len(upl) == 1
 
     # test value difference case
     upl = [db.Record().add_parent("A").add_property("a", 5)]
@@ -327,7 +339,7 @@ def test_identifiable_adapter_no_identifiable(crawler, ident):
     insl, updl = crawler.synchronize()
     assert len(updl) == 0
 
-    pers = [r for r in crawler.updateList if r.parents[0].name == "Person"]
+    pers = [r for r in crawler.target_data if r.parents[0].name == "Person"]
     # All persons are inserted, because they are not identifiable:
     assert len(insl) == len(pers)
 
@@ -347,22 +359,24 @@ def test_provenance_debug_data(crawler):
     assert check_key_count("Person") == 14
 
 
+def basic_retrieve_by_name_mock_up(rec, known):
+    """ returns a stored Record if rec.name is an existing key, None otherwise """
+    if rec.name in known:
+        return known[rec.name]
+    else:
+        return None
+
+
 @pytest.fixture
-def mock_retrieve(crawler):
-    # simulate remote server content by using the names to identify records
-    def base_mocked_lookup(rec, known):
-        if rec.name in known:
-            return known[rec.name]
-        else:
-            return None
-
-    # a record that is found remotely and should be added to the update list and one that is not
-    # found and should be added to the insert one
-    remote_known = {"A": db.Record(id=1111, name="A")}
+def crawler_mocked_identifiable_retrieve(crawler):
+    # mock retrieval of registered identifiables: return Record with just a parent
+    crawler.identifiableAdapter.get_registered_identifiable = Mock(
+        side_effect=lambda x: db.Record().add_parent(x.parents[0].name))
+
+    # Simulate remote server content by using the names to identify records
+    # There is only a single known Record with name A
     crawler.identifiableAdapter.retrieve_identified_record_for_record = Mock(side_effect=partial(
-        base_mocked_lookup, known=remote_known))
-    crawler.identifiableAdapter.get_registered_identifiable = (
-        lambda x: db.Record().add_parent(x.parents[0].name))
+        basic_retrieve_by_name_mock_up, known={"A": db.Record(id=1111, name="A")}))
     return crawler
 
 
@@ -371,9 +385,8 @@ def test_split_into_inserts_and_updates_trivial(crawler):
     crawler.split_into_inserts_and_updates([])
 
 
-def test_split_into_inserts_and_updates_single(mock_retrieve):
-    crawler = mock_retrieve
-
+def test_split_into_inserts_and_updates_single(crawler_mocked_identifiable_retrieve):
+    crawler = crawler_mocked_identifiable_retrieve
     entlist = [db.Record(name="A").add_parent(
         "C"), db.Record(name="B").add_parent("C")]
 
@@ -391,10 +404,13 @@ def test_split_into_inserts_and_updates_single(mock_retrieve):
     assert insert[0].name == "B"
     assert len(update) == 1
     assert update[0].name == "A"
+    # if this ever fails, the mock up may be removed
+    crawler.identifiableAdapter.get_registered_identifiable.assert_called()
+    crawler.identifiableAdapter.retrieve_identified_record_for_record.assert_called()
 
 
-def test_split_into_inserts_and_updates_with_duplicate(mock_retrieve):
-    crawler = mock_retrieve
+def test_split_into_inserts_and_updates_with_duplicate(crawler_mocked_identifiable_retrieve):
+    crawler = crawler_mocked_identifiable_retrieve
     a = db.Record(name="A").add_parent("C")
     b = db.Record(name="B").add_parent("C")
     b.add_property("A", a)
@@ -406,10 +422,13 @@ def test_split_into_inserts_and_updates_with_duplicate(mock_retrieve):
     assert insert[0].name == "B"
     assert len(update) == 1
     assert update[0].name == "A"
+    # if this ever fails, the mock up may be removed
+    crawler.identifiableAdapter.get_registered_identifiable.assert_called()
+    crawler.identifiableAdapter.retrieve_identified_record_for_record.assert_called()
 
 
-def test_split_into_inserts_and_updates_with_ref(mock_retrieve):
-    crawler = mock_retrieve
+def test_split_into_inserts_and_updates_with_ref(crawler_mocked_identifiable_retrieve):
+    crawler = crawler_mocked_identifiable_retrieve
     # try it with a reference
     a = db.Record(name="A").add_parent("C")
     b = db.Record(name="B").add_parent("C")
@@ -420,20 +439,23 @@ def test_split_into_inserts_and_updates_with_ref(mock_retrieve):
     assert insert[0].name == "B"
     assert len(update) == 1
     assert update[0].name == "A"
+    # if this ever fails, the mock up may be removed
+    crawler.identifiableAdapter.retrieve_identified_record_for_record.assert_called()
+    crawler.identifiableAdapter.get_registered_identifiable.assert_called()
 
 
-def test_split_into_inserts_and_updates_with_circ(mock_retrieve):
+def test_split_into_inserts_and_updates_with_circ(crawler):
     # try circular
-    crawler = mock_retrieve
     a = db.Record(name="A").add_parent("C")
     b = db.Record(name="B").add_parent("C")
     b.add_property("A", a)
     a.add_property("B", b)
     entlist = [a, b]
+    # TODO this does not seem to be complete!
 
 
-def test_split_into_inserts_and_updates_with_complex(mock_retrieve):
-    crawler = mock_retrieve
+def test_split_into_inserts_and_updates_with_complex(crawler_mocked_identifiable_retrieve):
+    crawler = crawler_mocked_identifiable_retrieve
     #      A
     #      ^
     #      |
@@ -452,12 +474,15 @@ def test_split_into_inserts_and_updates_with_complex(mock_retrieve):
     assert "B" in [el.name for el in insert]
     assert len(update) == 1
     assert update[0].name == "A"
+    # if this ever fails, the mock up may be removed
+    crawler.identifiableAdapter.get_registered_identifiable.assert_called()
+    crawler.identifiableAdapter.retrieve_identified_record_for_record.assert_called()
 
     # TODO write test where the unresoled entity is not part of the identifiable
 
 
-def test_split_into_inserts_and_updates_with_copy_attr(mock_retrieve):
-    crawler = mock_retrieve
+def test_split_into_inserts_and_updates_with_copy_attr(crawler_mocked_identifiable_retrieve):
+    crawler = crawler_mocked_identifiable_retrieve
     # assume identifiable is only the name
     a = db.Record(name="A").add_parent("C")
     a.add_property("foo", 1)
@@ -468,17 +493,17 @@ def test_split_into_inserts_and_updates_with_copy_attr(mock_retrieve):
 
     assert update[0].get_property("bar").value == 2
     assert update[0].get_property("foo").value == 1
+    # if this ever fails, the mock up may be removed
+    crawler.identifiableAdapter.get_registered_identifiable.assert_called()
+    crawler.identifiableAdapter.retrieve_identified_record_for_record.assert_called()
 
 
 def test_all_references_are_existing_already(crawler):
-    def base_mocked_lookup(rec, known):
-        if rec.name in known:
-            return known[rec.name]
-        else:
-            return None
+    # Simulate remote server content by using the names to identify records
+    # There are only two known Records with name A and B
     crawler.identifiableAdapter.get_registered_identifiable = Mock(side_effect=partial(
-        base_mocked_lookup, known={"A": db.Record(name="A").add_parent("C"),
-                                   "B": db.Record(name="B").add_parent("C")}))
+        basic_retrieve_by_name_mock_up, known={"A": db.Record(name="A").add_parent("C"),
+                                               "B": db.Record(name="B").add_parent("C")}))
 
     assert crawler.all_references_are_existing_already(
         db.Record().add_property('a', 123))
@@ -496,6 +521,8 @@ def test_all_references_are_existing_already(crawler):
     assert crawler.all_references_are_existing_already(db.Record()
                                                        .add_property('a', 123)
                                                        .add_property('b', a))
+    # if this ever fails, the mock up may be removed
+    crawler.identifiableAdapter.get_registered_identifiable.assert_called()
 
 
 def test_can_be_checked_externally(crawler):
@@ -512,12 +539,155 @@ def test_can_be_checked_externally(crawler):
                                                  .add_property('b', db.Record()))
 
 
-def test_replace_entities_by_ids(crawler):
+def test_replace_entities_with_ids(crawler):
     a = (db.Record().add_parent("B").add_property("A", 12345)
          .add_property("B", db.Record(id=12345))
          .add_property("C", [db.Record(id=12345), 233324]))
 
-    crawler.replace_entities_by_ids(a)
+    crawler.replace_entities_with_ids(a)
     assert a.get_property("A").value == 12345
     assert a.get_property("B").value == 12345
     assert a.get_property("C").value == [12345, 233324]
+
+
+def mock_get_entity_by_id(id):
+    candidates = [el for el in list(full_data.values()) if el.id == id]
+    if len(candidates) > 0:
+        return candidates[0]
+    else:
+        raise ValueError()
+
+
+def mock_get_entity_by_name(name):
+    candidates = [el for el in full_data.values()
+                  if (el.name is not None and el.name.lower() == name.lower())]
+    if len(candidates) > 0:
+        return candidates[0]
+    else:
+        raise ValueError()
+
+
+def prepare_crawler_with_sec_mode(mode, ident):
+    crawler = Crawler(debug=True, securityMode=mode)
+    crawler.crawl_directory(rfp("test_directories", "examples_article"),
+                            rfp("scifolder_cfood.yml"))
+    crawler.identifiableAdapter = ident
+
+    return crawler
+
+
+def reset_mocks(mocks):
+    for mock in mocks:
+        mock.reset_mock()
+
+
+def change_identifiable_prop(ident):
+    # the checks in here are only to make sure we change the record as we intend to
+    meas = ident._records[-2]
+    assert meas.parents[0].name == "Measurement"
+    resps = meas.properties[0]
+    assert resps.name == "date"
+    # change one element; This changes the date which is part of the identifiable
+    resps.value = "2022-01-04"
+
+
+def change_non_identifiable_prop(ident):
+    # the checks in here are only to make sure we change the record as we intend to
+    meas = ident._records[-1]
+    assert meas.parents[0].name == "Measurement"
+    resps = meas.properties[-1]
+    assert resps.name == "responsible"
+    assert len(resps.value) == 2
+    # change one element; This removes a responsible which is not part of the identifiable
+    del resps.value[-1]
+
+
+@patch("caoscrawler.crawl.Crawler._get_entity_by_id",
+       new=Mock(side_effect=mock_get_entity_by_id))
+@patch("caoscrawler.crawl.Crawler._get_entity_by_name",
+       new=Mock(side_effect=mock_get_entity_by_name))
+@patch("caoscrawler.crawl.db.Container.insert")
+@patch("caoscrawler.crawl.db.Container.update")
+@patch("caoscrawler.crawl.UpdateCache.insert")
+def test_security_mode(updateCacheMock, upmock, insmock, ident):
+    records_backup = deepcopy(ident._records)
+
+    # trivial case: nothing to do
+    crawler = prepare_crawler_with_sec_mode(SecurityMode.RETRIEVE, ident)
+    crawler.synchronize(commit_changes=True)
+    assert crawler.run_id is not None
+    insmock.assert_not_called()
+    upmock.assert_not_called()
+    updateCacheMock.assert_not_called()
+
+    # RETRIEVE: insert only
+    crawler = prepare_crawler_with_sec_mode(SecurityMode.RETRIEVE, ident)
+    # remove one element
+    del ident._records[-1]
+    # insert forbidden
+    crawler.synchronize(commit_changes=True)
+    assert crawler.run_id is not None
+    insmock.assert_not_called()
+    upmock.assert_not_called()
+    assert updateCacheMock.call_count == 1
+    # reset counts
+    reset_mocks([updateCacheMock, insmock, upmock])
+    # restore original ident
+    ident._records = deepcopy(records_backup)
+
+    # RETRIEVE: update only
+    crawler = prepare_crawler_with_sec_mode(SecurityMode.RETRIEVE, ident)
+    # change one element
+    change_non_identifiable_prop(ident)
+    crawler.synchronize(commit_changes=True)
+    assert crawler.run_id is not None
+    insmock.assert_not_called()
+    upmock.assert_not_called()
+    assert updateCacheMock.call_count == 1
+    # reset counts
+    reset_mocks([updateCacheMock, insmock, upmock])
+    # restore original ident
+    ident._records = deepcopy(records_backup)
+
+    # INSERT: insert only
+    crawler = prepare_crawler_with_sec_mode(SecurityMode.INSERT, ident)
+    # remove one element
+    del ident._records[-1]
+    crawler.synchronize(commit_changes=True)
+    assert crawler.run_id is not None
+    insmock.assert_called_once()
+    upmock.assert_not_called()
+    updateCacheMock.assert_not_called()
+    # reset counts
+    reset_mocks([updateCacheMock, insmock, upmock])
+    # restore original ident
+    ident._records = deepcopy(records_backup)
+
+    # INSERT: update only
+    crawler = prepare_crawler_with_sec_mode(SecurityMode.INSERT, ident)
+    # change one element
+    change_non_identifiable_prop(ident)
+    crawler.synchronize(commit_changes=True)
+    assert crawler.run_id is not None
+    insmock.assert_not_called()
+    upmock.assert_not_called()
+    updateCacheMock.assert_called_once()
+    # reset counts
+    reset_mocks([updateCacheMock, insmock, upmock])
+    # restore original ident
+    ident._records = deepcopy(records_backup)
+
+    # INSERT: insert and update
+    crawler = prepare_crawler_with_sec_mode(SecurityMode.INSERT, ident)
+    # change two elements
+    change_non_identifiable_prop(ident)
+    change_identifiable_prop(ident)
+    crawler.synchronize(commit_changes=True)
+    assert crawler.run_id is not None
+    insmock.assert_called_once()
+    upmock.assert_not_called()
+    updateCacheMock.assert_called_once()
+    # reset counts
+    reset_mocks([updateCacheMock, insmock, upmock])
+    # restore original ident
+    ident._records = deepcopy(records_backup)
diff --git a/unittests/test_validation.py b/unittests/test_validation.py
new file mode 100644
index 0000000000000000000000000000000000000000..686c66f72f55b66344322e0c6f3b9d1a2b76b3f9
--- /dev/null
+++ b/unittests/test_validation.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# ** header v3.0
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2022 Alexander Schlemmer
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ** end header
+#
+
+"""
+Test the validation of cfood definition files.
+"""
+
+from caoscrawler.crawl import Crawler
+
+from tempfile import NamedTemporaryFile
+
+import yaml
+import pytest
diff --git a/unittests/test_variable_substitutions.py b/unittests/test_variable_substitutions.py
index 071bf4646d20e35ed05dafaf5fabf786dc182dcc..203197b7f8af51605a413ac354a0426d61c9c0cb 100644
--- a/unittests/test_variable_substitutions.py
+++ b/unittests/test_variable_substitutions.py
@@ -40,6 +40,15 @@ def crawler():
     return crawler
 
 
+@pytest.fixture
+def crawler_2():
+    crawler = Crawler(debug=True)
+    crawler.crawl_directory(rfp("test_directories", "example_substitutions", "ExperimentalData"),
+                            rfp("test_directories", "example_substitutions",
+                                "substitutions_parents.yml"))
+    return crawler
+
+
 def test_substitutions(crawler):
     # @review Florian Spreckelsen 2022-05-13
     for i in range(2):
@@ -59,3 +68,18 @@ def test_substitutions(crawler):
         assert isinstance(subd[i]["Project"].get_property("dates").value, list)
         assert subd[i]["Project"].get_property(
             "dates").value[0] == "2022-05-12"
+
+
+def test_substitutions_parents(crawler_2):
+    # This is a test for:
+    # https://gitlab.indiscale.com/caosdb/src/caosdb-crawler/-/issues/35
+    # ... testing whether variable substitutions can be used in parent declarations.
+    subd = crawler_2.debug_tree[dircheckstr(
+        "File", "ExperimentalData", "220512_data.dat")]
+    # subd[0] <- generalStore
+    # subd[1] <- recordStore
+
+    parents = subd[1]["Experiment"].get_parents()
+    assert len(parents) == 2
+    assert parents[0].name == "Experiment"
+    assert parents[1].name == "Month_05"