diff --git a/.docker/docker-compose.yml b/.docker/docker-compose.yml
index e5bb4c9b8ca6ad1750922cb07c92cd6c5eb77c6b..02ccac5c48e039a3374a0d169f3b355f897e45fc 100644
--- a/.docker/docker-compose.yml
+++ b/.docker/docker-compose.yml
@@ -34,6 +34,7 @@ services:
       DEBUG: 1
       CAOSDB_CONFIG_AUTHTOKEN_CONFIG: "conf/core/authtoken.example.yaml"
       CAOSDB_CONFIG_TRANSACTION_BENCHMARK_ENABLED: "TRUE"
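+      # Note: the test-suite key below must match the key passed to
+      # set_test_key() in integrationtests/basic_example/test_basic.py, so
+      # that the integration tests only run against a server configured for testing.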
+      CAOSDB_CONFIG__CAOSDB_INTEGRATION_TEST_SUITE_KEY: 10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2
 volumes:
   scripting:
   authtoken:
diff --git a/.gitignore b/.gitignore
index 5599d7d263c8927025e128c37eabb185025bf96b..4402aa11bc399c03400c4427c669b93ebb2637ce 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,7 +3,6 @@ src/caoscrawler.egg-info/
 __pycache__
 .tox
 TAGS
-src/.coverage
 build/
 *~
 .pdbrc
@@ -17,3 +16,4 @@ provenance.yml
 src/doc/_apidoc/
 start_caosdb_docker.sh
 src/doc/_apidoc
+/dist/
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 30a8cd8fe4c08fd3fe0f3f98aaa56b83cb623086..bfac6b0012cb067657567381752a600736e7d788 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -3,7 +3,7 @@
 #
 # Copyright (C) 2018 Research Group Biomedical Physics,
 # Max-Planck-Institute for Dynamics and Self-Organization Göttingen
-# Copyright (C) 2019 Henrik tom Wörden 
+# Copyright (C) 2019 Henrik tom Wörden
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as
@@ -113,14 +113,34 @@ info:
   script:
     - *env
 
-unittest:
+unittest_py3.9:
   tags: [cached-dind]
-  image: docker:20.10
   stage: test
   image: $CI_REGISTRY_IMAGE
   script:
       - tox
 
+unittest_py3.8:
+  tags: [cached-dind]
+  stage: test
+  image: python:3.8
+  script: &python_test_script
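+    # &python_test_script is a YAML anchor; the Python 3.10 job below reuses
+    # this script block via the *python_test_script alias.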
+    # install dependencies
+    - pip install pytest pytest-cov
+    # TODO: Use f-branch logic here
+    - pip install git+https://gitlab.indiscale.com/caosdb/src/caosdb-pylib.git@dev
+    - pip install git+https://gitlab.indiscale.com/caosdb/src/caosdb-advanced-user-tools.git@dev
+    - pip install .
+    # actual test
+    - caosdb-crawler --help
+    - pytest --cov=caoscrawler -vv ./unittests
+
+unittest_py3.10:
+  tags: [cached-dind]
+  stage: test
+  image: python:3.10
+  script: *python_test_script
+
 inttest:
   tags: [docker]
   services:
@@ -224,7 +244,7 @@ build-testenv:
 
       - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
         # use here general latest or specific branch latest...
-      - docker build 
+      - docker build
         --build-arg PYLIB=${PYLIB}
         --build-arg ADVANCED=${ADVANCED:dev}
         --file .docker/Dockerfile
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8d80a17c3ad6321e115e523d1ca8082385e50b8b..02dcb340a1055881a12c31d90e079c2a5859a041 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,7 +5,71 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
-## [Unreleased]
+## [Unreleased] ##
+
+### Added ###
+
+- Identifiable class to represent the information used to identify Records.
+- Added some StructureElements: BooleanElement, FloatElement, IntegerElement,
+  ListElement, DictElement
+- String representation for Identifiables
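+
+A minimal sketch of constructing such an Identifiable (the property name is
+illustrative only; compare the usage in
+`integrationtests/basic_example/test_basic.py`):
+
+```python
+from caoscrawler.identifiable import Identifiable
+
+# characterize a Record via its properties; 123 stands for the id of a
+# referenced entity
+ident = Identifiable(properties={"project": 123})
+print(ident)  # Identifiables now have a string representation
+```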
+
+### Changed ###
+
+- Some StructureElements changed (see "How to upgrade" in the docs):
+  - Dict, DictElement and DictDictElement were merged into DictElement.
+  - DictTextElement and TextElement were merged into TextElement. The "match"
+    keyword is now invalid for TextElements.
+
+
+### Deprecated ###
+
+- The DictXYElement classes are deprecated and are now mere synonyms for the
+  corresponding XYElement classes.
+
+### Removed ###
+
+### Fixed ###
+
+- [#39](https://gitlab.com/caosdb/caosdb-crawler/-/issues/39) Merge conflicts in
+  `split_into_inserts_and_updates` when cached entity references a record
+  without id
+
+### Security ###
+
+### Documentation ###
+
+## [0.2.0] - 2022-11-18 ##
+(Florian Spreckelsen)
+
+### Added ###
+- the -c/--add-cwd-to-path option adds the current working directory (cwd) to
+  the Python path, so that, for example, custom converter modules can be
+  placed in the cwd.
+
+### Changed ###
+
+- Converters often used in dicts (DictFloatElementConverter,
+  DictIntegerElementConverter, ...) now accept other StructureElements by
+  default. For example, a DictIntegerElement is now accepted by default where a
+  DictFloatElement is expected. This behavior can be changed (see the converter
+  documentation). **Note:** This might lead to additional matches compared to
+  previous versions.
+- `_AbstractDictElementConverter` uses `re.DOTALL` for `match_value`
+- The "fallback" parent (the name of the element in the cfood) is now only
+  used when the object is created, and only if no parents are given.
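+
+To illustrate the `re.DOTALL` change above: with that flag, `.` also matches
+newlines, so `match_value` expressions can now match multi-line values:
+
+```python
+import re
+
+# without re.DOTALL, "." does not match the newline
+assert re.match(r"a.*b", "a\nb") is None
+# with re.DOTALL, it does
+assert re.match(r"a.*b", "a\nb", flags=re.DOTALL) is not None
+```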
+
+### Fixed ###
+
+- [#31](https://gitlab.com/caosdb/caosdb-crawler/-/issues/31) Identified cache:
+  Hash is the same for Records without IDs
+- [#30](https://gitlab.com/caosdb/caosdb-crawler/-/issues/30)
+- [#23](https://gitlab.com/caosdb/caosdb-crawler/-/issues/23) Crawler may
+  overwrite and delete existing data in case of manually added properties
+- [#10](https://gitlab.com/caosdb/caosdb-crawler/-/issues/10) Floats can be
+  interpreted as integers and vice versa; there are defaults for allowing other
+  types, and this can be changed per converter
+
+## [0.1.0] - 2022-10-11 ##
+(Florian Spreckelsen)
 
 ### Added
 
@@ -23,10 +87,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 * MAINT: Renamed module from `newcrawler` to `caoscrawler`
 * MAINT: Removed global converters from `crawl.py`
 
-### Deprecated
-
-### Removed
-
 ### Fixed
 
 * FIX: #12
@@ -35,8 +95,5 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 * FIX: #35 Parent cannot be set from value
 * [#6](https://gitlab.com/caosdb/caosdb-crawler/-/issues/6): Fixed many type
   hints to be compatible to python 3.8
-* [#9](https://gitlab.com/caosdb/caosdb-crawler/-/issues/9): Sclaras of types
+* [#9](https://gitlab.com/caosdb/caosdb-crawler/-/issues/9): Scalars of types
   different than string can now be given in cfood definitions
-
-
-### Security
diff --git a/README.md b/README.md
index 8576e5c969556005fdeb346ef2cdfadf1b7fc266..6c94473c066439b1645712c0046cd890b6b38715 100644
--- a/README.md
+++ b/README.md
@@ -1,34 +1,57 @@
-# caoscrawler
+# CaosDB-Crawler
 
-A new crawler for CaosDB.
+## Welcome
 
+This is the repository of the CaosDB-Crawler, a tool for automatic data
+insertion into [CaosDB](https://gitlab.com/caosdb/caosdb-meta).
 
-This package has yaml-header-tools as a dependency:
-https://gitlab.com/salexan/yaml-header-tools
+This is a new implementation that resolves problems of the original crawler in
+[caosdb-advancedtools](https://gitlab.com/caosdb/caosdb-advanced-user-tools).
 
+## Setup
 
+Please read [README_SETUP.md](README_SETUP.md) for instructions on how to set
+up this code.
 
-This python package can be installed using `pip`, e.g.:
-```bash
-pip install --user .
-```
 
-# Usage
+## Further Reading
 
-work in progress
+Please refer to the [official documentation](https://docs.indiscale.com/caosdb-crawler/) of the CaosDB-Crawler for more information.
 
-# Running the tests
+## Contributing
 
-After installation of the package run (within the project folder):
+Thank you very much to all contributors, [past, present](https://gitlab.com/caosdb/caosdb/-/blob/dev/HUMANS.md), and prospective ones.
 
-```bash
-pytest
-```
+### Code of Conduct
+
+By participating, you are expected to uphold our [Code of Conduct](https://gitlab.com/caosdb/caosdb/-/blob/dev/CODE_OF_CONDUCT.md).
+
+### How to Contribute
+
+* You found a bug, have a question, or want to request a feature? Please
+  [create an issue](https://gitlab.com/caosdb/caosdb-crawler/-/issues).
+* You want to contribute code?
+    * **Forking:** Please fork the repository and create a merge request in GitLab and choose this repository as
+      target. Make sure to select "Allow commits from members who can merge the target branch" under
+      Contribution when creating the merge request. This allows our team to work with you on your
+      request.
+    * **Code style:** This project adheres to the PEP8 recommendations; you can
+      check your code style using the `autopep8` tool (`autopep8 -i -r ./`).
+      Please write your doc strings following the
+      [NumpyDoc](https://numpydoc.readthedocs.io/en/latest/format.html) conventions.
+* You can also contact us at **info (AT) caosdb.de** and join the
+  CaosDB community on
+  [#caosdb:matrix.org](https://matrix.to/#/!unwwlTfOznjEnMMXxf:matrix.org).
+
+
+The file `unittests/records.xml` serves as a dummy for a server state with files.
+You can recreate it by uncommenting a section in `integrationtests/basic_example/test_basic.py`
+and rerunning the integration test.
 
 ## Integration Tests
+
 see `integrationtests/README.md`
 
-# Contributers
+## Contributors
 
 The original authors of this package are:
 
@@ -36,11 +59,11 @@ The original authors of this package are:
 - Henrik tom Wörden
 - Florian Spreckelsen
 
-# License
+## License
 
-Copyright (C) 2021 Research Group Biomedical Physics, Max Planck Institute for
-Dynamics and Self-Organization Göttingen.
-Copyright (C) 2021 IndiScale GmbH
+Copyright (C) 2021-2022 Research Group Biomedical Physics, Max Planck Institute
+                        for Dynamics and Self-Organization Göttingen.
+Copyright (C) 2021-2022 IndiScale GmbH
 
 All files in this repository are licensed under a [GNU Affero General Public
 License](LICENCE) (version 3 or later).
diff --git a/README_SETUP.md b/README_SETUP.md
new file mode 120000
index 0000000000000000000000000000000000000000..d478016ecde09dab8820d398b15df325f4159380
--- /dev/null
+++ b/README_SETUP.md
@@ -0,0 +1 @@
+src/doc/README_SETUP.md
\ No newline at end of file
diff --git a/RELEASE_GUIDELINES.md b/RELEASE_GUIDELINES.md
new file mode 100644
index 0000000000000000000000000000000000000000..d6bc2c9ae41b8032a5567f786eb060d7b67d2cc5
--- /dev/null
+++ b/RELEASE_GUIDELINES.md
@@ -0,0 +1,47 @@
+# Release Guidelines for the CaosDB-Crawler
+
+This document specifies release guidelines in addition to the general release
+guidelines of the CaosDB Project
+([RELEASE_GUIDELINES.md](https://gitlab.com/caosdb/caosdb/blob/dev/RELEASE_GUIDELINES.md))
+
+## General Prerequisites
+
+* All tests are passing.
+* FEATURES.md is up-to-date and declares the public API.
+* CHANGELOG.md is up-to-date.
+* Dependencies in `setup.cfg` are up-to-date.
+
+## Steps
+
+1. Create a release branch from the dev branch. This prevents further changes
+   to the code base and a never-ending release process. Naming: `release-<VERSION>`
+
+2. Update CHANGELOG.md
+
+3. Check all general prerequisites.
+
+4. Update the version:
+   - `version` variables in `src/doc/conf.py`
+   - Version in [setup.cfg](./setup.cfg): Check the `MAJOR`, `MINOR`, `MICRO`, `PRE` variables and set
+     `ISRELEASED` to `True`. Consider issuing a pre-release version for testing
+     first (see the sketch after this list).
+
+5. Merge the release branch into the main branch.
+
+6. Tag the latest commit of the main branch with `v<VERSION>`.
+
+7. Delete the release branch.
+
+8. Remove any existing `./dist` directory containing an old release.
+
+9. Publish the release by executing `./release.sh`, which uploads the
+   caoscrawler package to the Python Package Index [pypi.org](https://pypi.org).
+
+10. Merge the main branch back into the dev branch.
+
+11. After the merge of main to dev, start a new development version by
+    increasing at least the micro version in [setup.cfg](./setup.cfg) and
+    preparing CHANGELOG.md.
+
+12. Create releases on gitlab.com and gitlab.indiscale.com that contain (at
+    least) the most recent section of the CHANGELOG as the description and a
+    link to the PyPI package.
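+
+A hypothetical sketch of the version variables mentioned in step 4 (variable
+names as given above; the exact layout in the repository may differ):
+
+```python
+# version variables checked during a release
+MAJOR = 0
+MINOR = 2
+MICRO = 1
+PRE = ""           # e.g. "rc2" for a pre-release issued for testing
+ISRELEASED = True  # True only for the actual release
+```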
diff --git a/integrationtests/basic_example/test_basic.py b/integrationtests/basic_example/test_basic.py
index b24a1c658cfc9e23ca0ba2de266161864cb6b66c..0c847b08a729f3b112cbdf3c38bac31309cda125 100755
--- a/integrationtests/basic_example/test_basic.py
+++ b/integrationtests/basic_example/test_basic.py
@@ -1,7 +1,6 @@
 #!/usr/bin/env python3
 # encoding: utf-8
 #
-# ** header v3.0
 # This file is a part of the CaosDB Project.
 #
 # Copyright (C) 2021 Indiscale GmbH <info@indiscale.com>
@@ -21,11 +20,10 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
 #
-# ** end header
 #
 
 """
-module description
+An integration test module performing basic crawler tests.
 """
 
 from caosadvancedtools.crawler import Crawler as OldCrawler
@@ -35,15 +33,15 @@ import argparse
 import sys
 from argparse import RawTextHelpFormatter
 from caoscrawler import Crawler, SecurityMode
+from caoscrawler.identifiable import Identifiable
 import caosdb as db
 from caoscrawler.identifiable_adapters import CaosDBIdentifiableAdapter
 import pytest
 from caosadvancedtools.models.parser import parse_model_from_yaml
 import yaml
 
-# TODO is not yet merged in caosadvancedtools
-#from caosadvancedtools.testutils import clear_database, set_test_key
-# set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
+from caosdb.utils.register_tests import clear_database, set_test_key
+set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
 
 
 def rfp(*pathcomponents):
@@ -54,11 +52,6 @@ def rfp(*pathcomponents):
     return os.path.join(os.path.dirname(__file__), *pathcomponents)
 
 
-@pytest.fixture
-def clear_database():
-    db.execute_query("FIND Entity").delete()
-
-
 @pytest.fixture
 def usemodel():
     model = parse_model_from_yaml(rfp("model.yml"))
@@ -108,12 +101,21 @@ def crawler_extended(ident):
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr, cfood="scifolder_extended.yml")
     # correct paths for current working directory
-    file_list = [r for r in cr.target_data if r.role == "File"]
+    file_list = [r for r in cr.crawled_data if r.role == "File"]
     for f in file_list:
         f.file = rfp("..", "..", "unittests", "test_directories", f.file)
     return cr
 
 
+def test_ambiguous_lookup(clear_database, usemodel, crawler, ident):
+    ins, ups = crawler.synchronize()
+
+    proj = db.execute_query("FIND Project WITH identifier='SpeedOfLight'", unique=True)
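+    # An Identifiable that only references the project matches more than one
+    # existing record, so retrieving a single identified record must fail.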
+    with pytest.raises(RuntimeError, match=".*unambigiously.*"):
+        print(crawler.identifiableAdapter.retrieve_identified_record_for_identifiable(
+            Identifiable(properties={'project': proj.id})))
+
+
 def test_single_insertion(clear_database, usemodel, crawler, ident):
     ins, ups = crawler.synchronize()
 
@@ -122,14 +124,15 @@ def test_single_insertion(clear_database, usemodel, crawler, ident):
     for i in reversed(range(len(res))):
         if res[i].parents[0].name == "PyTestInfo":
             del res[i]
-    filename = rfp("..", "..", "unittests", "records.xml")
-    with open(filename, "w") as f:
-        xml = res.to_xml()
-        # Remove noscript and transaction benchmark:
-        for tag in ("noscript", "TransactionBenchmark"):
-            if xml.find(tag) is not None:
-                xml.remove(xml.find(tag))
-        f.write(db.common.utils.xml2str(xml))
+    # uncomment this to recreate the `records.xml` file
+    # filename = rfp("..", "..", "unittests", "records.xml")
+    # with open(filename, "w") as f:
+    #    xml = res.to_xml()
+    #    # Remove noscript and transaction benchmark:
+    #    for tag in ("noscript", "TransactionBenchmark"):
+    #        if xml.find(tag) is not None:
+    #            xml.remove(xml.find(tag))
+    #    f.write(db.common.utils.xml2str(xml))
 
     assert len(ins) == 18
     assert len(ups) == 0
@@ -160,7 +163,7 @@ def test_insertion(clear_database, usemodel, ident, crawler):
     # Do a second run on the same data, there should a new insert:
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr, "example_insert")
-    assert len(cr.target_data) == 3
+    assert len(cr.crawled_data) == 3
     ins, ups = cr.synchronize()
     assert len(ins) == 1
     assert len(ups) == 0
@@ -168,7 +171,7 @@ def test_insertion(clear_database, usemodel, ident, crawler):
     # Do it again to check whether nothing is changed:
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr, "example_insert")
-    assert len(cr.target_data) == 3
+    assert len(cr.crawled_data) == 3
     ins, ups = cr.synchronize()
     assert len(ins) == 0
     assert len(ups) == 0
@@ -180,7 +183,7 @@ def test_insert_auth(clear_database, usemodel, ident, crawler):
     # Do a second run on the same data, there should a new insert:
     cr = Crawler(debug=True, identifiableAdapter=ident, securityMode=SecurityMode.RETRIEVE)
     crawl_standard_test_directory(cr, "example_insert")
-    assert len(cr.target_data) == 3
+    assert len(cr.crawled_data) == 3
     ins, ups = cr.synchronize()
     assert len(ins) == 1
     assert not ins[0].is_valid()
@@ -190,7 +193,7 @@ def test_insert_auth(clear_database, usemodel, ident, crawler):
     # Do it again to check whether nothing is changed:
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr, "example_insert")
-    assert len(cr.target_data) == 3
+    assert len(cr.crawled_data) == 3
     ins, ups = cr.synchronize()
     assert len(ins) == 0
     assert len(ups) == 0
@@ -205,9 +208,9 @@ def test_insertion_and_update(clear_database, usemodel, ident, crawler):
 
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr, "example_overwrite_1")
-    # print(cr.target_data)
+    # print(cr.crawled_data)
     # cr.save_debug_data(rfp("provenance.yml"))
-    assert len(cr.target_data) == 3
+    assert len(cr.crawled_data) == 3
     ins, ups = cr.synchronize()
     assert len(ins) == 0
     assert len(ups) == 1
@@ -222,7 +225,7 @@ def test_identifiable_update(clear_database, usemodel, ident, crawler):
     crawl_standard_test_directory(cr)
 
     # Test the addition of a single property:
-    l = cr.target_data
+    l = cr.crawled_data
     for record in l:
         if (record.parents[0].name == "Measurement" and
                 record.get_property("date").value == "2020-01-03"):
@@ -238,7 +241,7 @@ def test_identifiable_update(clear_database, usemodel, ident, crawler):
     # Test the change within one property:
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr)
-    l = cr.target_data
+    l = cr.crawled_data
     for record in l:
         if (record.parents[0].name == "Measurement" and
                 record.get_property("date").value == "2020-01-03"):
@@ -252,7 +255,7 @@ def test_identifiable_update(clear_database, usemodel, ident, crawler):
     # Changing the date should result in a new insertion:
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr)
-    l = cr.target_data
+    l = cr.crawled_data
     for record in l:
         if (record.parents[0].name == "Measurement" and
                 record.get_property("date").value == "2020-01-03"):
@@ -269,7 +272,7 @@ def test_file_insertion_dry(clear_database, usemodel, ident):
     crawler_extended = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(
         crawler_extended, cfood="scifolder_extended.yml")
-    file_list = [r for r in crawler_extended.target_data if r.role == "File"]
+    file_list = [r for r in crawler_extended.crawled_data if r.role == "File"]
     assert len(file_list) == 11
 
     for f in file_list:
@@ -305,7 +308,7 @@ def test_file_update(clear_database, usemodel, ident, crawler_extended):
     cr = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr, cfood="scifolder_extended.yml")
 
-    file_list = [r for r in cr.target_data if r.role == "File"]
+    file_list = [r for r in cr.crawled_data if r.role == "File"]
     for f in file_list:
         f.file = rfp("..", "..", "unittests", "test_directories", f.file)
     ins2, ups2 = cr.synchronize(commit_changes=True)
@@ -320,7 +323,7 @@ def test_file_update(clear_database, usemodel, ident, crawler_extended):
     cr2 = Crawler(debug=True, identifiableAdapter=ident)
     crawl_standard_test_directory(cr2, cfood="scifolder_extended2.yml")
 
-    file_list = [r for r in cr2.target_data if r.role == "File"]
+    file_list = [r for r in cr2.crawled_data if r.role == "File"]
     for f in file_list:
         f.file = rfp("..", "..", "unittests", "test_directories", f.file)
     ins3, ups3 = cr2.synchronize(commit_changes=True)
diff --git a/integrationtests/test-profile/custom/other/restore/caosroot.example.tar.gz b/integrationtests/test-profile/custom/other/restore/caosroot.example.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..5e02a693960c64d3c82401e2de4abd72f7fd5fd1
Binary files /dev/null and b/integrationtests/test-profile/custom/other/restore/caosroot.example.tar.gz differ
diff --git a/integrationtests/test-profile/custom/other/restore/restore.dump.sql b/integrationtests/test-profile/custom/other/restore/restore.dump.sql
new file mode 100644
index 0000000000000000000000000000000000000000..eebed86273bbea2e343842d7ac6f6771709373e1
--- /dev/null
+++ b/integrationtests/test-profile/custom/other/restore/restore.dump.sql
@@ -0,0 +1,3189 @@
+-- MySQL dump 10.16  Distrib 10.1.38-MariaDB, for debian-linux-gnu (x86_64)
+--
+-- Host: sqldb    Database: caosdb
+-- ------------------------------------------------------
+-- Server version	10.4.2-MariaDB-1:10.4.2+maria~bionic
+
+/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
+/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
+/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
+/*!40101 SET NAMES utf8 */;
+/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
+/*!40103 SET TIME_ZONE='+00:00' */;
+/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
+/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
+/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
+/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
+
+--
+-- Table structure for table `collection_type`
+--
+
+DROP TABLE IF EXISTS `collection_type`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `collection_type` (
+  `domain_id` int(10) unsigned NOT NULL,
+  `entity_id` int(10) unsigned NOT NULL,
+  `property_id` int(10) unsigned NOT NULL,
+  `collection` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
+  KEY `domain_id` (`domain_id`,`entity_id`),
+  KEY `entity_id` (`entity_id`),
+  KEY `property_id` (`property_id`),
+  CONSTRAINT `collection_type_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `collection_type_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `collection_type_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `collection_type`
+--
+
+LOCK TABLES `collection_type` WRITE;
+/*!40000 ALTER TABLE `collection_type` DISABLE KEYS */;
+/*!40000 ALTER TABLE `collection_type` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `data_type`
+--
+
+DROP TABLE IF EXISTS `data_type`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `data_type` (
+  `domain_id` int(10) unsigned NOT NULL,
+  `entity_id` int(10) unsigned NOT NULL,
+  `property_id` int(10) unsigned NOT NULL,
+  `datatype` int(10) unsigned NOT NULL,
+  UNIQUE KEY `datatype_ukey` (`domain_id`,`entity_id`,`property_id`),
+  KEY `name_ov_dom_ent_idx` (`domain_id`,`entity_id`),
+  KEY `datatype_forkey_ent` (`entity_id`),
+  KEY `datatype_forkey_pro` (`property_id`),
+  KEY `datatype_forkey_type` (`datatype`),
+  CONSTRAINT `datatype_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `datatype_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `datatype_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `datatype_forkey_type` FOREIGN KEY (`datatype`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `data_type`
+--
+
+LOCK TABLES `data_type` WRITE;
+/*!40000 ALTER TABLE `data_type` DISABLE KEYS */;
+INSERT INTO `data_type` VALUES (0,0,20,14),(0,0,21,14),(0,0,24,14);
+/*!40000 ALTER TABLE `data_type` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `date_data`
+--
+
+DROP TABLE IF EXISTS `date_data`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `date_data` (
+  `domain_id` int(10) unsigned DEFAULT NULL,
+  `entity_id` int(10) unsigned DEFAULT NULL,
+  `property_id` int(10) unsigned DEFAULT NULL,
+  `value` int(11) NOT NULL,
+  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') COLLATE utf8_unicode_ci DEFAULT NULL,
+  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
+  KEY `date_data_dom_ent_idx` (`domain_id`,`entity_id`),
+  KEY `date_ov_forkey_ent` (`entity_id`),
+  KEY `date_ov_forkey_pro` (`property_id`),
+  CONSTRAINT `date_ov_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `date_ov_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `date_ov_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `date_data`
+--
+
+LOCK TABLES `date_data` WRITE;
+/*!40000 ALTER TABLE `date_data` DISABLE KEYS */;
+/*!40000 ALTER TABLE `date_data` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `datetime_data`
+--
+
+DROP TABLE IF EXISTS `datetime_data`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `datetime_data` (
+  `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.',
+  `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.',
+  `property_id` int(10) unsigned NOT NULL COMMENT 'Property.',
+  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') COLLATE utf8_unicode_ci NOT NULL COMMENT 'Status of this statement.',
+  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
+  `value_ns` int(10) unsigned DEFAULT NULL,
+  `value` bigint(20) NOT NULL,
+  KEY `domain_id` (`domain_id`,`entity_id`),
+  KEY `dat_entity_id_entity` (`entity_id`),
+  KEY `dat_property_id_entity` (`property_id`),
+  CONSTRAINT `dat_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `dat_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `dat_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `datetime_data`
+--
+
+LOCK TABLES `datetime_data` WRITE;
+/*!40000 ALTER TABLE `datetime_data` DISABLE KEYS */;
+/*!40000 ALTER TABLE `datetime_data` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `desc_overrides`
+--
+
+DROP TABLE IF EXISTS `desc_overrides`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `desc_overrides` (
+  `domain_id` int(10) unsigned DEFAULT NULL,
+  `entity_id` int(10) unsigned DEFAULT NULL,
+  `property_id` int(10) unsigned DEFAULT NULL,
+  `description` text COLLATE utf8_unicode_ci DEFAULT NULL,
+  UNIQUE KEY `desc_ov_ukey` (`domain_id`,`entity_id`,`property_id`),
+  KEY `desc_ov_dom_ent_idx` (`domain_id`,`entity_id`),
+  KEY `desc_ov_forkey_ent` (`entity_id`),
+  KEY `desc_ov_forkey_pro` (`property_id`),
+  CONSTRAINT `desc_ov_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `desc_ov_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `desc_ov_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `desc_overrides`
+--
+
+LOCK TABLES `desc_overrides` WRITE;
+/*!40000 ALTER TABLE `desc_overrides` DISABLE KEYS */;
+/*!40000 ALTER TABLE `desc_overrides` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `double_data`
+--
+
+DROP TABLE IF EXISTS `double_data`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `double_data` (
+  `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.',
+  `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.',
+  `property_id` int(10) unsigned NOT NULL COMMENT 'Property.',
+  `value` double NOT NULL,
+  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') COLLATE utf8_unicode_ci NOT NULL COMMENT 'Status of this statement.',
+  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
+  `unit_sig` bigint(20) DEFAULT NULL,
+  KEY `domain_id` (`domain_id`,`entity_id`),
+  KEY `dou_entity_id_entity` (`entity_id`),
+  KEY `dou_property_id_entity` (`property_id`),
+  CONSTRAINT `dou_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `dou_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `dou_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `double_data`
+--
+
+LOCK TABLES `double_data` WRITE;
+/*!40000 ALTER TABLE `double_data` DISABLE KEYS */;
+/*!40000 ALTER TABLE `double_data` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `entities`
+--
+
+DROP TABLE IF EXISTS `entities`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `entities` (
+  `id` int(10) unsigned NOT NULL AUTO_INCREMENT COMMENT 'Unique identifier.',
+  `name` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT 'Name of the entity.',
+  `description` text COLLATE utf8_unicode_ci DEFAULT NULL,
+  `role` enum('RECORDTYPE','RECORD','FILE','DOMAIN','PROPERTY','DATATYPE','ROLE','QUERYTEMPLATE') COLLATE utf8_unicode_ci NOT NULL,
+  `acl` int(10) unsigned DEFAULT NULL COMMENT 'Access Control List for the entity.',
+  PRIMARY KEY (`id`),
+  KEY `entity_entity_acl` (`acl`),
+  CONSTRAINT `entity_entity_acl` FOREIGN KEY (`acl`) REFERENCES `entity_acl` (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=100 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `entities`
+--
+
+LOCK TABLES `entities` WRITE;
+/*!40000 ALTER TABLE `entities` DISABLE KEYS */;
+INSERT INTO `entities` VALUES (0,'DOMAIN','The default domain.','ROLE',0),(1,'RECORDTYPE','The default recordtype.','ROLE',0),(2,'RECORD','The default record.','ROLE',0),(3,'FILE','The default file.','ROLE',0),(4,'PROPERTY','The default property.','ROLE',0),(7,'DATATYPE','The default datatype.','ROLE',0),(8,'QUERYTEMPLATE','The QueryTemplate role.','ROLE',0),(11,'REFERENCE','The default reference data type.','DATATYPE',0),(12,'INTEGER','The default integer data type.','DATATYPE',0),(13,'DOUBLE','The default double data type.','DATATYPE',0),(14,'TEXT','The default text data type.','DATATYPE',0),(15,'DATETIME','The default datetime data type.','DATATYPE',0),(16,'TIMESPAN','The default timespan data type.','DATATYPE',0),(17,'FILE','The default file reference data type.','DATATYPE',0),(18,'BOOLEAN','The defaulf boolean data type','DATATYPE',0),(20,'name','Name of an entity','PROPERTY',0),(21,'unit','Unit of an entity.','PROPERTY',0),(24,'description','Description of an entity.','PROPERTY',0),(50,'SQLITE','The SQLite file data type.','DATATYPE',0),(99,NULL,NULL,'RECORDTYPE',0);
+/*!40000 ALTER TABLE `entities` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `entity_acl`
+--
+
+DROP TABLE IF EXISTS `entity_acl`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `entity_acl` (
+  `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
+  `acl` varbinary(65525) NOT NULL,
+  PRIMARY KEY (`id`),
+  KEY `entity_acl_acl` (`acl`(3072))
+) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `entity_acl`
+--
+
+LOCK TABLES `entity_acl` WRITE;
+/*!40000 ALTER TABLE `entity_acl` DISABLE KEYS */;
+INSERT INTO `entity_acl` VALUES (0,'');
+/*!40000 ALTER TABLE `entity_acl` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `enum_data`
+--
+
+DROP TABLE IF EXISTS `enum_data`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `enum_data` (
+  `domain_id` int(10) unsigned DEFAULT NULL,
+  `entity_id` int(10) unsigned DEFAULT NULL,
+  `property_id` int(10) unsigned DEFAULT NULL,
+  `value` varbinary(255) NOT NULL,
+  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') COLLATE utf8_unicode_ci DEFAULT NULL,
+  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
+  KEY `enum_ov_dom_ent_idx` (`domain_id`,`entity_id`),
+  KEY `enum_ov_forkey_ent` (`entity_id`),
+  KEY `enum_ov_forkey_pro` (`property_id`),
+  CONSTRAINT `enum_ov_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `enum_ov_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `enum_ov_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `enum_data`
+--
+
+LOCK TABLES `enum_data` WRITE;
+/*!40000 ALTER TABLE `enum_data` DISABLE KEYS */;
+/*!40000 ALTER TABLE `enum_data` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `files`
+--
+
+DROP TABLE IF EXISTS `files`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `files` (
+  `file_id` int(10) unsigned NOT NULL COMMENT 'The file''s ID.',
+  `path` varchar(255) COLLATE utf8_unicode_ci NOT NULL COMMENT 'Directory of the file.',
+  `size` bigint(20) unsigned NOT NULL COMMENT 'Size in kB (oktet bytes).',
+  `hash` binary(64) DEFAULT NULL,
+  `checked_timestamp` bigint(20) NOT NULL DEFAULT 0,
+  PRIMARY KEY (`file_id`),
+  CONSTRAINT `fil_file_id_entity` FOREIGN KEY (`file_id`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `files`
+--
+
+LOCK TABLES `files` WRITE;
+/*!40000 ALTER TABLE `files` DISABLE KEYS */;
+/*!40000 ALTER TABLE `files` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `integer_data`
+--
+
+DROP TABLE IF EXISTS `integer_data`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `integer_data` (
+  `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.',
+  `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.',
+  `property_id` int(10) unsigned NOT NULL COMMENT 'Property.',
+  `value` bigint(20) NOT NULL,
+  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') COLLATE utf8_unicode_ci NOT NULL COMMENT 'Status of this statement.',
+  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
+  `unit_sig` bigint(20) DEFAULT NULL,
+  KEY `domain_id` (`domain_id`,`entity_id`),
+  KEY `int_entity_id_entity` (`entity_id`),
+  KEY `int_property_id_entity` (`property_id`),
+  CONSTRAINT `int_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `int_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `int_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `integer_data`
+--
+
+LOCK TABLES `integer_data` WRITE;
+/*!40000 ALTER TABLE `integer_data` DISABLE KEYS */;
+/*!40000 ALTER TABLE `integer_data` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `isa_cache`
+--
+
+DROP TABLE IF EXISTS `isa_cache`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `isa_cache` (
+  `child` int(10) unsigned NOT NULL,
+  `parent` int(10) unsigned NOT NULL,
+  `rpath` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
+  PRIMARY KEY (`child`,`parent`,`rpath`),
+  KEY `isa_cache_parent_entity` (`parent`),
+  CONSTRAINT `isa_cache_child_entity` FOREIGN KEY (`child`) REFERENCES `entities` (`id`),
+  CONSTRAINT `isa_cache_parent_entity` FOREIGN KEY (`parent`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `isa_cache`
+--
+
+LOCK TABLES `isa_cache` WRITE;
+/*!40000 ALTER TABLE `isa_cache` DISABLE KEYS */;
+/*!40000 ALTER TABLE `isa_cache` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `logging`
+--
+
+DROP TABLE IF EXISTS `logging`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `logging` (
+  `level` int(11) NOT NULL,
+  `logger` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
+  `message` mediumtext COLLATE utf8_unicode_ci NOT NULL,
+  `millis` bigint(20) NOT NULL,
+  `logRecord` blob NOT NULL
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `logging`
+--
+
+LOCK TABLES `logging` WRITE;
+/*!40000 ALTER TABLE `logging` DISABLE KEYS */;
+/*!40000 ALTER TABLE `logging` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `name_data`
+--
+
+DROP TABLE IF EXISTS `name_data`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `name_data` (
+  `domain_id` int(10) unsigned NOT NULL,
+  `entity_id` int(10) unsigned NOT NULL,
+  `property_id` int(10) unsigned NOT NULL,
+  `value` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
+  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') COLLATE utf8_unicode_ci NOT NULL,
+  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
+  KEY `domain_id` (`domain_id`,`entity_id`),
+  KEY `entity_id` (`entity_id`),
+  KEY `property_id` (`property_id`),
+  KEY `value` (`value`),
+  CONSTRAINT `name_data_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `name_data_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `name_data_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `name_data`
+--
+
+LOCK TABLES `name_data` WRITE;
+/*!40000 ALTER TABLE `name_data` DISABLE KEYS */;
+/*!40000 ALTER TABLE `name_data` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `name_overrides`
+--
+
+DROP TABLE IF EXISTS `name_overrides`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `name_overrides` (
+  `domain_id` int(10) unsigned DEFAULT NULL,
+  `entity_id` int(10) unsigned DEFAULT NULL,
+  `property_id` int(10) unsigned DEFAULT NULL,
+  `name` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
+  UNIQUE KEY `name_ov_ukey` (`domain_id`,`entity_id`,`property_id`),
+  KEY `name_ov_dom_ent_idx` (`domain_id`,`entity_id`),
+  KEY `name_ov_forkey_ent` (`entity_id`),
+  KEY `name_ov_forkey_pro` (`property_id`),
+  CONSTRAINT `name_ov_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `name_ov_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `name_ov_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `name_overrides`
+--
+
+LOCK TABLES `name_overrides` WRITE;
+/*!40000 ALTER TABLE `name_overrides` DISABLE KEYS */;
+/*!40000 ALTER TABLE `name_overrides` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `null_data`
+--
+
+DROP TABLE IF EXISTS `null_data`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `null_data` (
+  `domain_id` int(10) unsigned DEFAULT NULL,
+  `entity_id` int(10) unsigned DEFAULT NULL,
+  `property_id` int(10) unsigned DEFAULT NULL,
+  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') COLLATE utf8_unicode_ci DEFAULT NULL,
+  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
+  KEY `null_data_dom_ent_idx` (`domain_id`,`entity_id`),
+  KEY `null_forkey_ent` (`entity_id`),
+  KEY `null_forkey_pro` (`property_id`),
+  CONSTRAINT `null_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `null_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `null_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `null_data`
+--
+
+LOCK TABLES `null_data` WRITE;
+/*!40000 ALTER TABLE `null_data` DISABLE KEYS */;
+/*!40000 ALTER TABLE `null_data` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `passwd`
+--
+
+DROP TABLE IF EXISTS `passwd`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `passwd` (
+  `principal` varbinary(255) NOT NULL,
+  `hash` varbinary(255) NOT NULL,
+  `alg` varchar(255) COLLATE utf8_unicode_ci DEFAULT 'SHA-512',
+  `it` int(10) unsigned DEFAULT 5000,
+  `salt` varbinary(255) NOT NULL,
+  PRIMARY KEY (`principal`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `passwd`
+--
+
+LOCK TABLES `passwd` WRITE;
+/*!40000 ALTER TABLE `passwd` DISABLE KEYS */;
+/*!40000 ALTER TABLE `passwd` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `permissions`
+--
+
+DROP TABLE IF EXISTS `permissions`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `permissions` (
+  `role` varbinary(255) NOT NULL,
+  `permissions` mediumtext COLLATE utf8_unicode_ci NOT NULL,
+  PRIMARY KEY (`role`),
+  CONSTRAINT `perm_name_roles` FOREIGN KEY (`role`) REFERENCES `roles` (`name`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `permissions`
+--
+
+LOCK TABLES `permissions` WRITE;
+/*!40000 ALTER TABLE `permissions` DISABLE KEYS */;
+INSERT INTO `permissions` VALUES ('administration','[{\"grant\":\"true\",\"priority\":\"true\",\"permission\":\"*\"}]');
+/*!40000 ALTER TABLE `permissions` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `query_template_def`
+--
+
+DROP TABLE IF EXISTS `query_template_def`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `query_template_def` (
+  `id` int(10) unsigned NOT NULL,
+  `definition` mediumtext COLLATE utf8_unicode_ci NOT NULL,
+  PRIMARY KEY (`id`),
+  CONSTRAINT `query_template_def_ibfk_1` FOREIGN KEY (`id`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `query_template_def`
+--
+
+LOCK TABLES `query_template_def` WRITE;
+/*!40000 ALTER TABLE `query_template_def` DISABLE KEYS */;
+/*!40000 ALTER TABLE `query_template_def` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `reference_data`
+--
+
+DROP TABLE IF EXISTS `reference_data`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `reference_data` (
+  `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.',
+  `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.',
+  `property_id` int(10) unsigned NOT NULL COMMENT 'Property.',
+  `value` int(10) unsigned NOT NULL,
+  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') COLLATE utf8_unicode_ci NOT NULL COMMENT 'Status of this statement.',
+  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
+  KEY `entity_id` (`entity_id`,`property_id`),
+  KEY `ref_domain_id_entity` (`domain_id`),
+  KEY `ref_property_id_entity` (`property_id`),
+  KEY `ref_value_entity` (`value`),
+  CONSTRAINT `ref_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `ref_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `ref_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `ref_value_entity` FOREIGN KEY (`value`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `reference_data`
+--
+
+LOCK TABLES `reference_data` WRITE;
+/*!40000 ALTER TABLE `reference_data` DISABLE KEYS */;
+/*!40000 ALTER TABLE `reference_data` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `roles`
+--
+
+DROP TABLE IF EXISTS `roles`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `roles` (
+  `name` varbinary(255) NOT NULL,
+  `description` mediumtext COLLATE utf8_unicode_ci DEFAULT NULL,
+  PRIMARY KEY (`name`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `roles`
+--
+
+LOCK TABLES `roles` WRITE;
+/*!40000 ALTER TABLE `roles` DISABLE KEYS */;
+INSERT INTO `roles` VALUES ('administration','Users with this role have unrestricted permissions.'),('anonymous','Users who did not authenticate themselves.');
+/*!40000 ALTER TABLE `roles` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `rules`
+--
+
+DROP TABLE IF EXISTS `rules`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `rules` (
+  `domain_id` int(10) unsigned NOT NULL,
+  `entity_id` int(10) unsigned NOT NULL,
+  `transaction` enum('INSERT','RETRIEVE','UPDATE','DELETE') COLLATE utf8_unicode_ci NOT NULL,
+  `criterion` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
+  `modus` enum('MUST','SHOULD','SHOULDNT','MUSTNOT') COLLATE utf8_unicode_ci NOT NULL,
+  KEY `rule_entity_id_entity` (`entity_id`),
+  KEY `rule_domain_id_entity` (`domain_id`),
+  CONSTRAINT `rule_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `rule_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `rules`
+--
+
+LOCK TABLES `rules` WRITE;
+/*!40000 ALTER TABLE `rules` DISABLE KEYS */;
+INSERT INTO `rules` VALUES (0,0,'INSERT','CheckPropValid','MUST'),(0,0,'INSERT','CheckParValid','MUST'),(0,0,'INSERT','CheckParOblPropPresent','SHOULD'),(0,0,'INSERT','CheckValueParsable','MUST'),(0,0,'UPDATE','CheckPropValid','MUST'),(0,0,'UPDATE','CheckParValid','MUST'),(0,0,'UPDATE','CheckParOblPropPresent','SHOULD'),(0,0,'UPDATE','CheckValueParsable','MUST'),(0,0,'DELETE','CheckReferenceDependencyExistent','MUST'),(0,0,'DELETE','CheckChildDependencyExistent','MUST'),(0,1,'INSERT','CheckNamePresent','MUST'),(0,1,'INSERT','CheckPropPresent','SHOULD'),(0,1,'INSERT','SetImpToRecByDefault','MUST'),(0,1,'UPDATE','CheckNamePresent','MUST'),(0,1,'UPDATE','CheckPropPresent','SHOULD'),(0,1,'UPDATE','SetImpToRecByDefault','MUST'),(0,2,'INSERT','CheckNamePresent','SHOULD'),(0,2,'INSERT','CheckPropPresent','SHOULD'),(0,2,'INSERT','CheckParPresent','MUST'),(0,2,'INSERT','SetImpToFix','MUST'),(0,2,'UPDATE','CheckNamePresent','SHOULD'),(0,2,'UPDATE','CheckPropPresent','SHOULD'),(0,2,'UPDATE','CheckParPresent','MUST'),(0,2,'UPDATE','SetImpToFix','MUST'),(0,3,'INSERT','CheckNamePresent','SHOULD'),(0,3,'INSERT','MatchFileProp','MUST'),(0,3,'INSERT','CheckTargetPathValid','MUST'),(0,3,'INSERT','SetImpToFix','MUST'),(0,3,'UPDATE','CheckNamePresent','SHOULD'),(0,3,'UPDATE','MatchFileProp','MUST'),(0,3,'UPDATE','CheckTargetPathValid','MUST'),(0,3,'UPDATE','SetImpToFix','MUST'),(0,4,'INSERT','CheckDatatypePresent','MUST'),(0,4,'UPDATE','CheckDatatypePresent','MUST'),(0,4,'INSERT','CheckNamePresent','MUST'),(0,4,'UPDATE','CheckNamePresent','MUST'),(0,4,'INSERT','SetImpToFix','MUST'),(0,4,'UPDATE','SetImpToFix','MUST'),(0,8,'UPDATE','CheckQueryTemplate','MUST'),(0,8,'INSERT','CheckQueryTemplate','MUST'),(0,11,'INSERT','CheckRefidPresent','SHOULD'),(0,11,'INSERT','CheckRefidValid','MUST'),(0,11,'INSERT','CheckRefidIsaParRefid','SHOULD'),(0,11,'UPDATE','CheckRefidPresent','SHOULD'),(0,11,'UPDATE','CheckRefidValid','MUST'),(0,11,'UPDATE','CheckRefidIsaParRefid','SHOULD'),(0,12,'INSERT','CheckUnitPresent','SHOULD'),(0,12,'INSERT','ParseUnit','SHOULD'),(0,12,'UPDATE','CheckUnitPresent','SHOULD'),(0,12,'UPDATE','ParseUnit','SHOULD'),(0,13,'INSERT','CheckUnitPresent','SHOULD'),(0,13,'INSERT','ParseUnit','SHOULD'),(0,13,'UPDATE','CheckUnitPresent','SHOULD'),(0,13,'UPDATE','ParseUnit','SHOULD'),(0,16,'INSERT','CheckUnitPresent','SHOULD'),(0,16,'UPDATE','CheckUnitPresent','SHOULD'),(0,17,'INSERT','CheckRefidValid','MUST'),(0,17,'INSERT','CheckRefidIsaParRefid','MUST'),(0,17,'UPDATE','CheckRefidValid','MUST'),(0,17,'UPDATE','CheckRefidIsaParRefid','MUST'),(0,50,'UPDATE','CheckRefidValid','MUST'),(0,50,'UPDATE','CheckRefidIsaParRefid','SHOULD'),(0,50,'UPDATE','SQLiteTransaction','MUST');
+/*!40000 ALTER TABLE `rules` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `stats`
+--
+
+DROP TABLE IF EXISTS `stats`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `stats` (
+  `name` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
+  `value` blob DEFAULT NULL,
+  PRIMARY KEY (`name`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `stats`
+--
+
+LOCK TABLES `stats` WRITE;
+/*!40000 ALTER TABLE `stats` DISABLE KEYS */;
+INSERT INTO `stats` VALUES ('TransactionBenchmark','��\0sr\00caosdb.server.database.misc.TransactionBenchmark�Cl=���E\0J\0sinceL\0acct\0Ljava/util/HashMap;L\0countsq\0~\0xp\0\0l���Wsr\0java.util.HashMap���`�\0F\0\nloadFactorI\0	thresholdxp?@\0\0\0\0\0w\0\0\0\0\0\0t\0	SyncStatssr\0java.lang.Long;��̏#�\0J\0valuexr\0java.lang.Number������\0\0xp\0\0\0\0\0\0\0t\0GetInfosq\0~\0\0\0\0\0\0\0 xsq\0~\0?@\0\0\0\0\0w\0\0\0\0\0\0q\0~\0sr\0java.lang.Integer⠤���8\0I\0valuexq\0~\0\0\0\0q\0~\0	sq\0~\0\0\0\0x');
+/*!40000 ALTER TABLE `stats` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `text_data`
+--
+
+DROP TABLE IF EXISTS `text_data`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `text_data` (
+  `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.',
+  `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.',
+  `property_id` int(10) unsigned NOT NULL COMMENT 'Property.',
+  `value` text COLLATE utf8_unicode_ci NOT NULL,
+  `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') COLLATE utf8_unicode_ci NOT NULL COMMENT 'Status of this statement.',
+  `pidx` int(10) unsigned NOT NULL DEFAULT 0,
+  KEY `domain_id` (`domain_id`,`entity_id`),
+  KEY `str_entity_id_entity` (`entity_id`),
+  KEY `str_property_id_entity` (`property_id`),
+  CONSTRAINT `str_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `str_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`),
+  CONSTRAINT `str_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `text_data`
+--
+
+LOCK TABLES `text_data` WRITE;
+/*!40000 ALTER TABLE `text_data` DISABLE KEYS */;
+/*!40000 ALTER TABLE `text_data` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `transaction_log`
+--
+
+DROP TABLE IF EXISTS `transaction_log`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `transaction_log` (
+  `transaction` varchar(255) COLLATE utf8_unicode_ci NOT NULL COMMENT 'Transaction.',
+  `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity ID.',
+  `username` varbinary(255) NOT NULL,
+  `seconds` bigint(20) unsigned NOT NULL DEFAULT 0,
+  `nanos` int(10) unsigned NOT NULL DEFAULT 0,
+  `realm` varbinary(255) NOT NULL,
+  KEY `entity_id` (`entity_id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `transaction_log`
+--
+
+LOCK TABLES `transaction_log` WRITE;
+/*!40000 ALTER TABLE `transaction_log` DISABLE KEYS */;
+/*!40000 ALTER TABLE `transaction_log` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `units_lin_con`
+--
+
+DROP TABLE IF EXISTS `units_lin_con`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `units_lin_con` (
+  `signature_from` bigint(20) NOT NULL,
+  `signature_to` bigint(20) NOT NULL,
+  `a` decimal(65,30) NOT NULL,
+  `b_dividend` int(11) NOT NULL,
+  `b_divisor` int(11) NOT NULL,
+  `c` decimal(65,30) NOT NULL,
+  PRIMARY KEY (`signature_from`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `units_lin_con`
+--
+
+LOCK TABLES `units_lin_con` WRITE;
+/*!40000 ALTER TABLE `units_lin_con` DISABLE KEYS */;
+/*!40000 ALTER TABLE `units_lin_con` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `user_info`
+--
+
+DROP TABLE IF EXISTS `user_info`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `user_info` (
+  `realm` varbinary(255) NOT NULL,
+  `name` varbinary(255) NOT NULL,
+  `email` varbinary(255) DEFAULT NULL,
+  `status` enum('ACTIVE','INACTIVE') COLLATE utf8_unicode_ci NOT NULL DEFAULT 'INACTIVE',
+  `entity` int(10) unsigned DEFAULT NULL,
+  PRIMARY KEY (`realm`,`name`),
+  KEY `subject_entity` (`entity`),
+  CONSTRAINT `subjects_ibfk_1` FOREIGN KEY (`entity`) REFERENCES `entities` (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `user_info`
+--
+
+LOCK TABLES `user_info` WRITE;
+/*!40000 ALTER TABLE `user_info` DISABLE KEYS */;
+/*!40000 ALTER TABLE `user_info` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Table structure for table `user_roles`
+--
+
+DROP TABLE IF EXISTS `user_roles`;
+/*!40101 SET @saved_cs_client     = @@character_set_client */;
+/*!40101 SET character_set_client = utf8 */;
+CREATE TABLE `user_roles` (
+  `realm` varbinary(255) NOT NULL,
+  `user` varbinary(255) NOT NULL,
+  `role` varbinary(255) NOT NULL,
+  PRIMARY KEY (`realm`,`user`,`role`),
+  KEY `user_roles_ibfk_1` (`role`),
+  CONSTRAINT `user_roles_ibfk_1` FOREIGN KEY (`role`) REFERENCES `roles` (`name`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
+/*!40101 SET character_set_client = @saved_cs_client */;
+
+--
+-- Dumping data for table `user_roles`
+--
+
+LOCK TABLES `user_roles` WRITE;
+/*!40000 ALTER TABLE `user_roles` DISABLE KEYS */;
+/*!40000 ALTER TABLE `user_roles` ENABLE KEYS */;
+UNLOCK TABLES;
+
+--
+-- Dumping routines for database 'caosdb'
+--
+/*!50003 DROP FUNCTION IF EXISTS `CaosDBVersion` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` FUNCTION `CaosDBVersion`() RETURNS varchar(255) CHARSET utf8 COLLATE utf8_unicode_ci
+    DETERMINISTIC
+RETURN 'v2.1.1' ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP FUNCTION IF EXISTS `constructDateTimeWhereClauseForColumn` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` FUNCTION `constructDateTimeWhereClauseForColumn`(seconds_col VARCHAR(255), nanos_col VARCHAR(255), vDateTimeSecLow VARCHAR(255), vDateTimeNSLow VARCHAR(255), vDateTimeSecUpp VARCHAR(255), vDateTimeNSUpp VARCHAR(255), operator CHAR(4)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci
+    DETERMINISTIC
+BEGIN
+
+	DECLARE isInterval BOOLEAN DEFAULT vDateTimeSecUpp IS NOT NULL or vDateTimeNSUpp IS NOT NULL; 
+    DECLARE operator_prefix CHAR(1) DEFAULT LEFT(operator,1);
+
+	IF isInterval THEN
+		IF operator = '=' THEN
+			RETURN " 0=1";
+        ELSEIF operator = '!=' THEN
+			RETURN " 0=1";
+        ELSEIF operator = '>' or operator = '<=' THEN 
+            RETURN CONCAT(" ", seconds_col, operator_prefix, vDateTimeSecUpp);
+        ELSEIF operator = '<' or operator = '>=' THEN 
+            RETURN CONCAT(" ", seconds_col, operator_prefix, vDateTimeSecLow);
+		ELSEIF operator = "(" THEN
+            RETURN CONCAT(" ", seconds_col, ">=", vDateTimeSecLow, " AND ",seconds_col, "<", vDateTimeSecUpp);
+		ELSEIF operator = "!(" THEN
+            RETURN CONCAT(" ", seconds_col, "<", vDateTimeSecLow, " OR ", seconds_col, ">=", vDateTimeSecUpp);
+		END IF;
+    ELSE
+        IF operator = '=' THEN
+            RETURN CONCAT(" ",
+				seconds_col,
+				"=", vDateTimeSecLow, IF(vDateTimeNSLow IS NULL, CONCAT(' AND ', nanos_col, ' IS NULL'), CONCAT(' AND ',
+					nanos_col,
+				'=', vDateTimeNSLow)));
+        ELSEIF operator = '!=' THEN
+            RETURN CONCAT(" ",
+				seconds_col,
+				"!=", vDateTimeSecLow, IF(vDateTimeNSLow IS NULL, '', CONCAT(' OR ',
+						nanos_col,
+						'!=', vDateTimeNSLow)));
+        ELSEIF operator = '>' or operator = '<' THEN
+            RETURN CONCAT(" ",
+				seconds_col, operator, vDateTimeSecLow, IF(vDateTimeNSLow IS NULL, '', CONCAT(' OR (',seconds_col,'=', vDateTimeSecLow, ' AND ',nanos_col, operator, vDateTimeNSLow, ')')));
+        ELSEIF operator = '>=' or operator = '<=' THEN
+            RETURN CONCAT(" ",seconds_col, operator, vDateTimeSecLow, IF(vDateTimeNSLow IS NULL, '', CONCAT(' AND (',seconds_col, operator_prefix, vDateTimeSecLow, ' OR ',nanos_col, operator, vDateTimeNSLow, ')')));
+		ELSEIF operator = "(" THEN
+            RETURN IF(vDateTimeNSLow IS NULL,CONCAT(" ",seconds_col,"=", vDateTimeSecLow),CONCAT(" ",seconds_col,"=",vDateTimeSecLow," AND ",nanos_col,"=",vDateTimeNSLow));
+		ELSEIF operator = "!(" THEN
+            RETURN IF(vDateTimeNSLow IS NULL,CONCAT(" ",seconds_col,"!=",vDateTimeSecLow, ""),CONCAT(" ",seconds_col,"!=",vDateTimeSecLow," OR ",nanos_col, " IS NULL OR ", nanos_col, "!=",vDateTimeNSLow));
+        END IF;
+    END IF;
+    return ' 0=1';
+END ;;
+DELIMITER ;
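+-- Example (illustrative values): a non-interval equality filter expands to a
+-- seconds comparison plus a nanoseconds comparison, e.g.
+--   constructDateTimeWhereClauseForColumn('subdata.value', 'subdata.value_ns',
+--                                         '1577836800', '500', NULL, NULL, '=')
+--   -> ' subdata.value=1577836800 AND subdata.value_ns=500'
+-- When an upper bound is present the value is treated as a half-open interval
+-- and only the seconds columns are compared.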
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP FUNCTION IF EXISTS `convert_unit` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` FUNCTION `convert_unit`(unit_sig BIGINT, value DECIMAL(65,30)) RETURNS decimal(65,30)
+    DETERMINISTIC
+BEGIN
+    DECLARE ret DECIMAL(65,30) DEFAULT value;
+
+    SELECT (((value+a)*b_dividend)/b_divisor+c) INTO ret FROM units_lin_con WHERE signature_from=unit_sig;
+    RETURN ret;
+END ;;
+DELIMITER ;
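+-- Example (illustrative values): with a units_lin_con row (a=0, b_dividend=9,
+-- b_divisor=5, c=32) for the given signature, convert_unit(sig, 100)
+-- evaluates ((100+0)*9)/5+32 = 212 (the Celsius-to-Fahrenheit map). When no
+-- conversion row exists, the value is returned unchanged.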
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP FUNCTION IF EXISTS `getAggValueWhereClause` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` FUNCTION `getAggValueWhereClause`(entities VARCHAR(255), properties VARCHAR(255)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci
+    DETERMINISTIC
+BEGIN
+        RETURN CONCAT(" EXISTS (SELECT 1 FROM `", entities, "` AS ent WHERE ent.id = subdata.entity_id LIMIT 1)", IF(properties IS NOT NULL AND properties != '', CONCAT(" AND EXISTS (SELECT 1 FROM `", properties, "` as props WHERE props.id = subdata.property_id LIMIT 1)"),''));
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP FUNCTION IF EXISTS `getDateTimeWhereClause` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` FUNCTION `getDateTimeWhereClause`(vDateTime VARCHAR(255), operator CHAR(4)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci
+    DETERMINISTIC
+BEGIN
+	DECLARE sep_loc INTEGER DEFAULT LOCATE('--',vDateTime);
+    DECLARE vDateTimeLow VARCHAR(255) DEFAULT IF(sep_loc != 0, SUBSTRING_INDEX(vDateTime, '--',1), vDateTime);
+    DECLARE vDateTimeUpp VARCHAR(255) DEFAULT IF(sep_loc != 0, SUBSTRING_INDEX(vDateTime, '--',-1), NULL);
+	
+    DECLARE vDateTimeSecLow VARCHAR(255) DEFAULT SUBSTRING_INDEX(vDateTimeLow, 'UTC', 1);
+    DECLARE vDateTimeNSLow VARCHAR(255) DEFAULT IF(SUBSTRING_INDEX(vDateTimeLow, 'UTC', -1)='',NULL,SUBSTRING_INDEX(vDateTimeLow, 'UTC', -1));
+	
+    DECLARE vDateTimeSecUpp VARCHAR(255) DEFAULT IF(sep_loc != 0, SUBSTRING_INDEX(vDateTimeUpp, 'UTC', 1), NULL);
+    DECLARE vDateTimeNSUpp VARCHAR(255) DEFAULT IF(sep_loc != 0 AND SUBSTRING_INDEX(vDateTimeUpp, 'UTC', -1)!='',SUBSTRING_INDEX(vDateTimeUpp, 'UTC', -1),NULL);
+    
+	
+	RETURN constructDateTimeWhereClauseForColumn("subdata.value", "subdata.value_ns", vDateTimeSecLow, vDateTimeNSLow, vDateTimeSecUpp, vDateTimeNSUpp, operator);
+END ;;
+DELIMITER ;
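+-- Example (illustrative values): a single encoded value '1577836800UTC500'
+-- splits at 'UTC' into seconds=1577836800 and nanos=500 (an empty part after
+-- 'UTC' yields NULL nanos); an interval such as '1577836800UTC--1609459200UTC'
+-- splits at '--' into lower and upper bounds before delegating to
+-- constructDateTimeWhereClauseForColumn().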
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP FUNCTION IF EXISTS `getDateWhereClause` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` FUNCTION `getDateWhereClause`(vDateTimeDotNotation VARCHAR(255), operator CHAR(4)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci
+    DETERMINISTIC
+BEGIN
+	DECLARE isInterval INTEGER DEFAULT LOCATE('--',vDateTimeDotNotation);
+	DECLARE vILB VARCHAR(255) DEFAULT IF(isInterval != 0, SUBSTRING_INDEX(vDateTimeDotNotation, '--', 1), vDateTimeDotNotation);
+	DECLARE vEUB VARCHAR(255) DEFAULT IF(isInterval != 0, SUBSTRING_INDEX(vDateTimeDotNotation, '--', -1), NULL);
+	DECLARE vILB_Date INTEGER DEFAULT SUBSTRING_INDEX(vILB, '.', 1);
+	DECLARE vEUB_Date INTEGER DEFAULT SUBSTRING_INDEX(vEUB, '.', 1);
+    DECLARE hasTime INTEGER DEFAULT LOCATE('.NULL.NULL',vILB);
+    DECLARE dom INTEGER DEFAULT vILB_Date % 100;
+    DECLARE mon INTEGER DEFAULT ((vILB_Date % 10000) - dom) / 100;    
+    DECLARE yea INTEGER DEFAULT (vILB_Date - (vILB_Date%10000)) / 10000;
+    SELECT vILB_Date != vEUB_Date INTO isInterval;
+
+    IF operator = '=' and hasTime != 0 THEN
+        RETURN CONCAT(" subdata.value=", vILB_Date);
+    ELSEIF operator = "!=" and hasTime != 0 THEN
+        IF mon != 0  and dom != 0 THEN
+            RETURN CONCAT(" subdata.value!=", vILB_Date, " and subdata.value%100!=0"); 
+        ELSEIF mon != 0 THEN
+            RETURN CONCAT(" subdata.value!=", vILB_Date, " and subdata.value%100=0 and subdata.value%10000!=0");
+        ELSE
+            RETURN CONCAT(" subdata.value!=", vILB_Date, " and subdata.value%10000=0");
+        END IF;
+    ELSEIF operator = "(" and hasTime != 0 THEN
+        IF mon != 0 and dom != 0 THEN
+            RETURN CONCAT(" subdata.value=", vILB_Date);
+        ELSEIF mon != 0 THEN
+            RETURN CONCAT(" subdata.value=",vILB_Date," OR (subdata.value>", vILB_Date, " and subdata.value<", vEUB_Date, " and subdata.value%10000!=0)");
+        ELSE
+            RETURN CONCAT(" subdata.value=",vILB_Date," OR (subdata.value>", vILB_Date, " and subdata.value<", vEUB_Date,")");
+        END IF;
+    ELSEIF operator = "!(" THEN
+        IF hasTime = 0 THEN
+            RETURN " 0=0";
+        END IF;
+        IF mon != 0 and dom != 0 THEN
+            RETURN CONCAT(" subdata.value!=",vILB_Date);
+        ELSEIF mon != 0 THEN
+            RETURN CONCAT(" (subdata.value!=",vILB_Date, " AND subdata.value%100=0) OR ((subdata.value<", vILB_Date, " or subdata.value>", vEUB_Date, ") and subdata.value%100!=0)");
+        ELSE
+            RETURN CONCAT(" (subdata.value!=",vILB_Date, " AND subdata.value%10000=0) OR ((subdata.value<", vILB_Date, " or subdata.value>=", vEUB_Date, ") and subdata.value%10000!=0)");
+        END IF;
+    ELSEIF operator = "<" THEN
+        IF mon != 0 and dom != 0 THEN
+            RETURN CONCAT(" subdata.value<", vILB_Date, " and (subdata.value%100!=0 or (subdata.value<", yea*10000+mon*100, " and subdata.value%10000!=0) or (subdata.value<", yea*10000, " and subdata.value%10000=0))");
+        ELSEIF mon != 0 THEN
+            RETURN CONCAT(" subdata.value<", vILB_Date, " and (subdata.value%10000!=0 or (subdata.value<", yea*10000, "))");
+        ELSE
+            RETURN CONCAT(" subdata.value<", vILB_Date);
+        END IF;
+    ELSEIF operator = ">" THEN
+        IF mon != 0 and dom != 0 THEN
+            RETURN CONCAT(" subdata.value>", vILB_Date);
+        ELSEIF mon != 0 THEN
+            RETURN CONCAT(" subdata.value>=",vEUB_Date);
+        ELSE
+            RETURN CONCAT(" subdata.value>=",vEUB_Date);
+        END IF;
+    END IF;
+
+    return ' 0=1';
+END ;;
+DELIMITER ;
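+-- Example (illustrative value): the dot notation '20200102.NULL.NULL' encodes
+-- the date 2020-01-02 as the integer 20200102 (day = v % 100, month =
+-- ((v % 10000) - day) / 100, year = (v - v % 10000) / 10000); the
+-- '.NULL.NULL' suffix marks a value without a time component. Month- and
+-- year-precision dates leave the lower digits at zero, which is why the
+-- clauses above test v % 100 and v % 10000.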
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP FUNCTION IF EXISTS `getDoubleWhereClause` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` FUNCTION `getDoubleWhereClause`(value DOUBLE, unit_sig BIGINT, valueStdUnit DECIMAL(65,30), stdUnit_sig BIGINT, o CHAR(4)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci
+    DETERMINISTIC
+BEGIN
+    RETURN IF(unit_sig IS NULL AND value IS NOT NULL, 
+        CONCAT('subdata.value ', o, ' \'', value, '\''), 
+        CONCAT(
+            IF(value IS NULL, '', 
+                CONCAT('(subdata.unit_sig=', unit_sig, ' AND subdata.value ', o, ' \'', value, '\') OR ')), 
+        	IF(unit_sig = stdUnit_sig,'',CONCAT('(subdata.unit_sig=', stdUnit_sig,' AND subdata.value ', o, ' \'', valueStdUnit, '\') OR ')),'(standard_unit(subdata.unit_sig)=', stdUnit_sig,' AND convert_unit(subdata.unit_sig,subdata.value) ', o, ' ', valueStdUnit, ')')); 
+END ;;
+DELIMITER ;
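+-- Summary: the returned clause ORs up to three cases: the stored value has
+-- the same unit signature and is compared directly, the value is already
+-- stored in the standard unit, or the stored value is converted via
+-- convert_unit() to the standard unit before the comparison. Without a unit
+-- signature, the value is compared as-is.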
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP FUNCTION IF EXISTS `makeStmt` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` FUNCTION `makeStmt`(sourceSet VARCHAR(255), targetSet VARCHAR(255), data VARCHAR(20000), properties VARCHAR(20000)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci
+    NO SQL
+BEGIN		
+
+        RETURN CONCAT(
+            IF(targetSet IS NULL,
+                CONCAT('DELETE FROM `',sourceSet,'` WHERE NOT EXISTS (SELECT 1 FROM '), 
+                CONCAT('INSERT IGNORE INTO `',targetSet,'` (id) SELECT id FROM `',sourceSet,'` WHERE EXISTS (SELECT 1 FROM ')),
+            IF(properties IS NULL,
+                CONCAT(data,' as data WHERE '),
+                CONCAT('`',properties,'` as prop JOIN ',data,' as data ON (data.property_id=prop.id) WHERE (data.entity_id=prop.id2 OR prop.id2=0) AND ')),
+            'data.entity_id=`', sourceSet, '`.`id` LIMIT 1)'
+        );
+
+END ;;
+DELIMITER ;
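+-- Summary: makeStmt() generates the statement that narrows or copies a
+-- result set. With targetSet NULL it emits
+--   DELETE FROM `sourceSet` WHERE NOT EXISTS (SELECT 1 FROM <data> ...)
+-- and otherwise
+--   INSERT IGNORE INTO `targetSet` (id) SELECT id FROM `sourceSet` WHERE EXISTS (...)
+-- joining against the properties table when a property filter is given.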
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP FUNCTION IF EXISTS `standard_unit` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` FUNCTION `standard_unit`(unit_sig BIGINT) RETURNS bigint(20)
+    DETERMINISTIC
+BEGIN
+    DECLARE ret BIGINT DEFAULT unit_sig;
+
+    SELECT signature_to INTO ret FROM units_lin_con WHERE signature_from=unit_sig;
+    RETURN ret;
+END ;;
+DELIMITER ;
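+-- Companion to convert_unit(): standard_unit() maps a unit signature to the
+-- signature_to of its registered linear conversion, falling back to the input
+-- signature when units_lin_con has no matching row.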
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `applyBackReference` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `applyBackReference`(in sourceSet VARCHAR(255), targetSet VARCHAR(255), in propertiesTable VARCHAR(255), in entitiesTable VARCHAR(255), in subQuery BOOLEAN)
+BEGIN
+	DECLARE newTableName VARCHAR(255) DEFAULT NULL;
+
+
+	IF subQuery IS TRUE THEN
+		call registerTempTableName(newTableName);
+
+		SET @createBackRefSubQueryTableStr = CONCAT('CREATE TEMPORARY TABLE `',newTableName,'` ( entity_id INT UNSIGNED NOT NULL, id INT UNSIGNED NOT NULL, CONSTRAINT `',newTableName,'PK` PRIMARY KEY (id, entity_id))');
+
+		PREPARE createBackRefSubQueryTable FROM @createBackRefSubQueryTableStr;
+		EXECUTE createBackRefSubQueryTable;
+		DEALLOCATE PREPARE createBackRefSubQueryTable;
+
+		SET @backRefSubResultSetStmtStr = CONCAT('INSERT INTO `',newTableName,'` (id,entity_id) SELECT entity_id AS id, value AS entity_id FROM `reference_data` AS data WHERE EXISTS (SELECT 1 FROM `',sourceSet,'` AS source WHERE source.id=data.value LIMIT 1)', IF(propertiesTable IS NULL,'',CONCAT(' AND EXISTS (SELECT 1 FROM `',propertiesTable,'` AS p WHERE p.id=data.property_id LIMIT 1)')), IF(entitiesTable IS NULL,'',CONCAT(' AND EXISTS (SELECT 1 FROM `',entitiesTable,'` AS e WHERE e.id=data.entity_id LIMIT 1)')));
+
+		PREPARE backRefSubResultSetStmt FROM @backRefSubResultSetStmtStr;
+		EXECUTE backRefSubResultSetStmt;
+		DEALLOCATE PREPARE backRefSubResultSetStmt;
+
+		SELECT newTableName as list;	
+	ELSE 
+	    IF targetSet IS NULL OR sourceSet = targetSet THEN
+        	SET @stmtBackRefStr = CONCAT('DELETE FROM `', sourceSet, '` WHERE NOT EXISTS (SELECT 1 FROM `reference_data` AS data WHERE data.value=`', sourceSet, '`.`id`', IF(entitiesTable IS NULL, '', CONCAT(' AND EXISTS (SELECT 1 FROM `', entitiesTable, '` AS e WHERE e.id=data.entity_id LIMIT 1)')), IF(propertiesTable IS NULL, '', CONCAT(' AND EXISTS (SELECT 1 FROM `', propertiesTable, '` AS p WHERE p.id=data.property_id LIMIT 1)')), ')');
+    	ELSE
+        	SET @stmtBackRefStr = CONCAT('INSERT IGNORE INTO `', targetSet, '` (id) SELECT id FROM `',sourceSet,'` AS source WHERE EXISTS (SELECT 1 FROM `reference_data` AS data WHERE data.value=source.id', IF(entitiesTable IS NULL, '', CONCAT(' AND EXISTS (SELECT 1 FROM `', entitiesTable, '` AS e WHERE e.id=data.entity_id LIMIT 1)')), IF(propertiesTable IS NULL, '', CONCAT(' AND EXISTS (SELECT 1 FROM `', propertiesTable, '` AS p WHERE p.id=data.property_id LIMIT 1)')), ')');
+    	END IF;
+    
+    	PREPARE stmtBackRef FROM @stmtBackRefStr;
+    	EXECUTE stmtBackRef;
+    	DEALLOCATE PREPARE stmtBackRef;
+	END IF;
+	
+END ;;
+DELIMITER ;
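+-- Summary: with subQuery=TRUE the procedure materializes the back-reference
+-- pairs (referencing entity id, referenced entity id) from reference_data in
+-- a fresh temporary table and returns its name as `list`; otherwise it keeps
+-- only those entities in sourceSet (or copies them to targetSet) that are the
+-- target of a reference, optionally restricted by propertiesTable and
+-- entitiesTable.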
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `applyIDFilter` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `applyIDFilter`(in sourceSet VARCHAR(255), in targetSet VARCHAR(255), in o CHAR(2), in vInt BIGINT, in agg CHAR(3))
+IDFILTER_LABEL: BEGIN
+DECLARE data VARCHAR(20000) DEFAULT NULL;
+DECLARE aggVal VARCHAR(255) DEFAULT NULL;
+
+
+IF agg IS NOT NULL THEN
+	SET @stmtIDAggValStr = CONCAT("SELECT ", agg, "(id) INTO @sAggVal FROM `", sourceSet, "`");
+	PREPARE stmtIDAggVal FROM @stmtIDAggValStr;
+	EXECUTE stmtIDAggVal;
+    DEALLOCATE PREPARE stmtIDAggVal;
+    SET aggVal = @sAggVal;
+END IF;
+
+
+IF targetSet IS NULL OR targetSet = sourceSet THEN
+    SET data = CONCAT("DELETE FROM `",sourceSet,"` WHERE ",IF(o IS NULL OR vInt IS NULL,"1=1",CONCAT("NOT id",o,vInt)),IF(aggVal IS NULL, "", CONCAT(" AND id!=",aggVal)));
+ELSE
+    SET data = CONCAT("INSERT IGNORE INTO `",targetSet,"` SELECT data.id as id FROM `",sourceSet,"` AS data WHERE ",IF(o IS NULL OR vInt IS NULL,"1=1",CONCAT("data.id",o,vInt)),IF(aggVal IS NULL, "", CONCAT(" AND data.id=", aggVal)));
+END IF;
+
+Set @stmtIDFilterStr = data;
+PREPARE stmtIDFilter FROM @stmtIDFilterStr;
+EXECUTE stmtIDFilter;
+DEALLOCATE PREPARE stmtIDFilter;
+	
+END ;;
+DELIMITER ;
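+-- Example (illustrative arguments): applyIDFilter('tmp1', NULL, '>', 42, NULL)
+-- prepares and executes
+--   DELETE FROM `tmp1` WHERE NOT id>42
+-- i.e. without a distinct target set the filter narrows the source set in
+-- place; with agg set (e.g. MIN or MAX) the aggregate over the ids is
+-- resolved first and added as an additional id constraint.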
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `applyPOV` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `applyPOV`(in sourceSet VARCHAR(255), in targetSet VARCHAR(255), in propertiesTable VARCHAR(255), in refIdsTable VARCHAR(255), in o CHAR(4), in vText VARCHAR(255), in vInt INT, in vDouble DOUBLE, in unit_sig BIGINT, in vDoubleStdUnit DOUBLE, in stdUnit_sig BIGINT, in vDateTime VARCHAR(255), in vDateTimeDotNotation VARCHAR(255), in agg CHAR(3), in pname VARCHAR(255))
+POV_LABEL: BEGIN
+    DECLARE data TEXT DEFAULT NULL; 
+    DECLARE sTextData VARCHAR(20000) DEFAULT NULL; 
+    DECLARE sEnumData VARCHAR(20000) DEFAULT NULL; 
+    DECLARE sIntData VARCHAR(20000) DEFAULT NULL; 
+    DECLARE sDoubleData VARCHAR(20000) DEFAULT NULL; 
+    DECLARE sDatetimeData VARCHAR(20000) DEFAULT NULL; 
+    DECLARE sNullData VARCHAR(20000) DEFAULT NULL; 
+    DECLARE sDateData VARCHAR(20000) DEFAULT NULL; 
+    DECLARE sRefData VARCHAR(20000) DEFAULT NULL; 
+    DECLARE aggValue VARCHAR(255) DEFAULT NULL;
+    DECLARE aggValueWhereClause VARCHAR(20000) DEFAULT NULL;
+    DECLARE distinctUnits INT DEFAULT 0;
+    DECLARE usedStdUnit BIGINT DEFAULT NULL;
+    DECLARE keepTabl VARCHAR(255) DEFAULT NULL;
+
+    IF o = '->' THEN
+        
+        call applyRefPOV(sourceSet,targetSet, propertiesTable, refIdsTable);
+        LEAVE POV_LABEL;
+    ELSEIF o = '0' THEN
+        
+        
+        SET vText = NULL;
+        SET sTextData = 'SELECT domain_id, entity_id, property_id FROM `null_data` AS subdata';
+
+    ELSEIF o = '!0' THEN
+        
+        
+        SET vText = NULL;
+        SET sTextData = 'SELECT DISTINCT domain_id, entity_id, property_id FROM `text_data` AS subdata WHERE subdata.value IS NOT NULL UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `enum_data` AS subdata WHERE subdata.value IS NOT NULL UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `integer_data` AS subdata WHERE subdata.value IS NOT NULL UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `double_data` AS subdata WHERE subdata.value IS NOT NULL UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data` AS subdata WHERE subdata.value IS NOT NULL UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data` AS subdata WHERE subdata.value IS NOT NULL UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `reference_data` AS subdata WHERE subdata.value IS NOT NULL';
+
+    ELSEIF o = "(" or o = "!(" THEN
+        SET sTextData = IF(vText IS NULL,' SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data`', IF(vDateTimeDotNotation IS NULL, NULL, CONCAT(' SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data` AS subdata WHERE ',getDateWhereClause(vDateTimeDotNotation,o))));
+        SET sDatetimeData = IF(vText IS NULL,' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data`', IF(vDateTime IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data` AS subdata WHERE ',getDateTimeWhereClause(vDateTime,o))));
+        SET vText = NULL;
+    ELSEIF agg IS NOT NULL THEN
+        
+
+        
+        SET aggValueWhereClause = CONCAT(getDoubleWhereClause(vDouble, unit_sig, vDoubleStdUnit, stdUnit_sig, o), ' AND ');
+        SET aggValueWhereClause = CONCAT(IF(aggValueWhereClause IS NULL, '', aggValueWhereClause), getAggValueWhereClause(sourceSet, propertiesTable));
+
+        
+        SET @aggValueStmtStr = CONCAT('SELECT ',agg,'(subdata.value), ', agg, '(convert_unit(subdata.unit_sig,subdata.value)), COUNT(DISTINCT standard_unit(subdata.unit_sig)), max(standard_unit(subdata.unit_sig)) INTO @sAggValue, @sAggValueConvert, @distinctUnits, @StdUnitSig FROM (SELECT entity_id, property_id, value, unit_sig FROM `integer_data` UNION SELECT entity_id, property_id, value, unit_sig FROM `double_data`) AS subdata WHERE ', aggValueWhereClause);
+
+        
+        PREPARE stmtAggValueStmt FROM @aggValueStmtStr;
+        EXECUTE stmtAggValueStmt;
+        DEALLOCATE PREPARE stmtAggValueStmt;
+
+        SET distinctUnits = @distinctUnits;
+        SET aggValue = @sAggValue;
+
+        
+        IF distinctUnits = 1 THEN
+            SET aggValue = @sAggValueConvert;
+            SET usedStdUnit = @StdUnitSig;
+        ELSE
+            call raiseWarning(CONCAT("The filter POV(",IF(pname IS NULL, 'NULL', pname),",",IF(o IS NULL, 'NULL', o),",",IF(vText IS NULL, 'NULL', vText),") with the aggregate function '", agg, "' could not compare the values in their units because the values had different base units. Only their numeric value has been taken into account." ));
+        END IF;
+
+        IF aggValue IS NULL THEN
+            SET sTextData = 'SELECT NULL as domain_id, NULL as entity_id, NULL as property_id';
+        ELSE
+            SET sTextData = '';
+            SET sIntData = CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `integer_data` as subdata WHERE ', getDoubleWhereClause(aggValue, usedStdUnit, aggValue, usedStdUnit, '='));
+            SET sDoubleData = CONCAT(' SELECT DISTINCT domain_id, entity_id, property_id FROM `double_data` as subdata WHERE ', getDoubleWhereClause(aggValue, usedStdUnit, aggValue, usedStdUnit, '='));
+        END IF;
+
+        SET vText = NULL;
+    ELSE
+        
+        SET sTextData = IF(vText IS NULL, 'SELECT DISTINCT domain_id, entity_id, property_id FROM `text_data`', CONCAT('SELECT DISTINCT domain_id, entity_id, property_id FROM `text_data` AS subdata WHERE subdata.value ',o,' ?'));
+        SET sEnumData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `enum_data`', CONCAT(' UNION SELECT DISTINCT domain_id, entity_id, property_id FROM `enum_data` AS subdata WHERE subdata.value ', o, ' ?'));
+        SET sIntData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT subdata.domain_id, subdata.entity_id, subdata.property_id FROM `integer_data` AS subdata', IF(vInt IS NULL AND vDoubleStdUnit IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `integer_data` AS subdata WHERE ', getDoubleWhereClause(vInt, unit_sig, vDoubleStdUnit, stdUnit_sig, o))));
+        SET sDoubleData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT subdata.domain_id, subdata.entity_id, subdata.property_id FROM `double_data` AS subdata', IF(vDouble IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `double_data` AS subdata WHERE ', getDoubleWhereClause(vDouble,unit_sig,vDoubleStdUnit,stdUnit_sig,o))));
+        SET sDatetimeData = IF(vText IS NULL,' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data`', IF(vDateTime IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data` AS subdata WHERE ',getDateTimeWhereClause(vDateTime,o))));
+        SET sDateData = IF(vText IS NULL,' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data`', IF(vDateTimeDotNotation IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data` AS subdata WHERE ',getDateWhereClause(vDateTimeDotNotation,o))));
+        SET sRefData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `reference_data`', IF(refIdsTable IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `reference_data` AS subdata WHERE EXISTS (SELECT 1 FROM `', refIdsTable, '` AS refIdsTable WHERE subdata.value=refIdsTable.id LIMIT 1)')));
+        SET sNullData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `null_data`', NULL);
+
+    END IF;
+
+    SET data = CONCAT('(',sTextData,
+                IF(sEnumData IS NULL, '', sEnumData),
+                IF(sDoubleData IS NULL, '', sDoubleData),
+                IF(sIntData IS NULL, '', sIntData),
+                IF(sDatetimeData IS NULL, '', sDatetimeData),
+                IF(sDateData IS NULL, '', sDateData),
+                IF(sRefData IS NULL, '', sRefData),
+                IF(sNullData IS NULL, '', sNullData),
+                ')'
+            );
+
+
+    call createTmpTable(keepTabl);
+
+    
+    SET @stmtPOVkeepTblStr = CONCAT("INSERT IGNORE INTO `", keepTabl, "` (id) SELECT DISTINCT entity_id AS id FROM ", data, " as data", IF(propertiesTable IS NULL, '', CONCAT(' WHERE EXISTS (Select 1 from `', propertiesTable, '` AS prop WHERE prop.id = data.property_id AND (prop.id2=data.entity_id OR prop.id2=0))')));
+
+    SET @stmtPOVStr = CONCAT(
+            IF(targetSet IS NULL,
+                CONCAT('DELETE FROM `',sourceSet,'` WHERE NOT EXISTS (SELECT 1 FROM `'),
+                CONCAT('INSERT IGNORE INTO `',targetSet,'` (id) SELECT id FROM `',sourceSet,'` WHERE EXISTS (SELECT 1 FROM `')),
+            keepTabl,
+            '` AS data WHERE data.id=`', sourceSet, '`.`id` LIMIT 1)'
+        );
+
+
+    
+    PREPARE stmt3 FROM @stmtPOVStr;
+    PREPARE stmtPOVkeepTbl FROM @stmtPOVkeepTblStr;
+    IF vText IS NULL THEN
+        EXECUTE stmtPOVkeepTbl;
+    ELSE
+        SET @vText = vText;
+        EXECUTE stmtPOVkeepTbl USING @vText, @vText;
+    END IF;
+    EXECUTE stmt3;
+    DEALLOCATE PREPARE stmt3;
+    DEALLOCATE PREPARE stmtPOVkeepTbl;
+
+    SELECT @stmtPOVkeepTblStr as applyPOVStmt1, @stmtPOVStr as applyPOVStmt2, keepTabl as applyPOVIntermediateResultSet;
+
+
+END ;;
+DELIMITER ;
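+-- Summary: applyPOV assembles one subquery per value table (text, enum,
+-- integer, double, datetime, date, reference, null_data), unions them into a
+-- single derived table, collects the matching entity ids in a temporary
+-- "keep" table and then either deletes non-matching rows from sourceSet or
+-- copies matching ids into targetSet. Reference filters (o = '->') are
+-- delegated to applyRefPOV; the generated statements and the keep table name
+-- are reported via the final SELECT.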
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `applyRefPOV` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `applyRefPOV`(in sourceSet VARCHAR(255), in targetSet VARCHAR(255), in properties VARCHAR(255), in refs VARCHAR(255))
+BEGIN
+    DECLARE data VARCHAR(20000) DEFAULT CONCAT('(SELECT domain_id, entity_id, property_id FROM `reference_data` AS subdata WHERE EXISTS (SELECT 1 FROM `', refs, '` AS refs WHERE subdata.value=refs.id LIMIT 1))');
+    SET @stmtRefPOVStr = makeStmt(sourceSet,targetSet,data,properties);
+
+    PREPARE stmt4 FROM @stmtRefPOVStr;
+    EXECUTE stmt4;
+    DEALLOCATE PREPARE stmt4;
+
+    SELECT @stmtRefPOVStr as applyRefPOVStmt;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `applySAT` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `applySAT`(in sourceSet VARCHAR(255), in targetSet VARCHAR(255), in loc MEDIUMTEXT, in op CHAR(5))
+BEGIN
+
+    IF targetSet IS NULL OR sourceSet = targetSet THEN
+        SET @stmtSATString = CONCAT('DELETE FROM `', sourceSet, '` WHERE id NOT IN (SELECT file_id FROM files WHERE path ', op, ' ?)');  
+    ELSE
+        SET @stmtSATString = CONCAT('INSERT INTO `', targetSet, '` (id) SELECT data.id FROM `',sourceSet,'` as data WHERE EXISTS (SELECT 1 FROM `files` as f WHERE f.file_id=data.id AND f.path ', op, ' ?)');
+    END IF;
+    PREPARE stmtSAT FROM @stmtSATString;
+	SET @loc = loc;
+    EXECUTE stmtSAT USING @loc;
+    DEALLOCATE PREPARE stmtSAT;
+
+END ;;
+DELIMITER ;
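+-- applySAT filters file entities by path: op is the comparison operator and
+-- loc the path pattern, bound as a prepared-statement parameter. Only ids
+-- present in the `files` table can survive this filter.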
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `applyTransactionFilter` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `applyTransactionFilter`(in sourceSet VARCHAR(255), targetSet VARCHAR(255), in transaction VARCHAR(255), in operator_u CHAR(2), in realm VARCHAR(255), in userName VARCHAR(255), in ilb BIGINT, in ilb_nanos INT UNSIGNED, in eub BIGINT, in eub_nanos INT UNSIGNED, in operator_t CHAR(2))
+BEGIN
+	DECLARE data TEXT default CONCAT('(SELECT entity_id FROM transaction_log AS t WHERE t.transaction=\'', 
+		transaction, 
+		'\'',
+		IF(userName IS NOT NULL, 
+			CONCAT(' AND t.realm', operator_u, '? AND t.username', operator_u, '?'),
+			'' 
+		),
+		IF(ilb IS NOT NULL, 
+			CONCAT(" AND", constructDateTimeWhereClauseForColumn("t.seconds", "t.nanos", ilb, ilb_nanos, eub, eub_nanos, operator_t)),
+			""
+		),
+		')'
+	);
+
+	SET @stmtTransactionStr = makeStmt(sourceSet,targetSet,data,NULL);
+	PREPARE stmtTransactionFilter from @stmtTransactionStr;
+	IF userName IS NOT NULL THEN
+		SET @userName = userName;
+		SET @realm = realm;
+		EXECUTE stmtTransactionFilter USING @realm, @userName;
+	ELSE
+		EXECUTE stmtTransactionFilter;
+	END IF;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `calcComplementUnion` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `calcComplementUnion`(in targetSet VARCHAR(255), in subResultSet VARCHAR(255), in universe VARCHAR(255))
+BEGIN
+    SET @stmtComplementUnionStr = CONCAT('INSERT IGNORE INTO `', targetSet, '` SELECT id FROM `',universe, '` AS universe WHERE NOT EXISTS ( SELECT 1 FROM `', subResultSet,'` AS diff WHERE diff.id=universe.id)');
+    PREPARE stmtComplementUnion FROM @stmtComplementUnionStr;
+    EXECUTE stmtComplementUnion;
+    DEALLOCATE PREPARE stmtComplementUnion;
+    
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `calcDifference` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `calcDifference`(in resultSetTable VARCHAR(255), in diff VARCHAR(255))
+BEGIN
+    SET @diffStmtStr = CONCAT('DELETE FROM `', resultSetTable, '` WHERE EXISTS ( SELECT 1 FROM `', diff,'` AS diff WHERE diff.id=`',resultSetTable,'`.`id`)');
+    PREPARE diffStmt FROM @diffStmtStr;
+    EXECUTE diffStmt;
+    DEALLOCATE PREPARE diffStmt;
+    
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `calcIntersection` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `calcIntersection`(in resultSetTable VARCHAR(255), in intersectWith VARCHAR(255))
+BEGIN
+    SET @diffStmtStr = CONCAT('DELETE FROM `', resultSetTable, '` WHERE NOT EXISTS ( SELECT 1 FROM `', intersectWith,'` AS diff WHERE diff.id=`',resultSetTable,'`.`id`)');
+    PREPARE diffStmt FROM @diffStmtStr;
+    EXECUTE diffStmt;
+    DEALLOCATE PREPARE diffStmt;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `calcUnion` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `calcUnion`(in targetSet VARCHAR(255), in sourceSet VARCHAR(255))
+BEGIN
+    SET @diffStmtStr = CONCAT('INSERT IGNORE INTO `', targetSet, '` (id) SELECT id FROM `',sourceSet,'`');
+    PREPARE diffStmt FROM @diffStmtStr;
+    EXECUTE diffStmt;
+    DEALLOCATE PREPARE diffStmt;
+    
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `cleanUpLinCon` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `cleanUpLinCon`()
+BEGIN
+
+    DELETE FROM units_lin_con WHERE NOT EXISTS (SELECT '1' FROM double_data WHERE unit_sig=signature_from) AND NOT EXISTS (SELECT '1' FROM integer_data WHERE unit_sig=signature_from);
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `cleanUpQuery` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `cleanUpQuery`()
+BEGIN
+   SELECT * from warnings;
+
+   SET @pstmtstr = CONCAT('DROP TEMPORARY TABLE IF EXISTS `warnings`', IF(@tempTableList IS NULL, '', CONCAT(',',@tempTableList)));
+   PREPARE pstmt FROM @pstmtstr;
+   EXECUTE pstmt;
+   DEALLOCATE PREPARE pstmt;
+
+   SET @tempTableList = NULL;
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `copyTable` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `copyTable`(in fromTable VARCHAR(255), in toTable VARCHAR(255))
+BEGIN
+    SET @copyTableStmtStr = CONCAT('INSERT IGNORE INTO `', toTable, '` (id) SELECT id FROM `', fromTable, '`');
+    PREPARE copyTableStmt FROM @copyTableStmtStr;
+    EXECUTE copyTableStmt;
+    DEALLOCATE PREPARE copyTableStmt;
+    
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `createTmpTable` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `createTmpTable`(out newTableName VARCHAR(255))
+BEGIN
+    call registerTempTableName(newTableName);
+    
+    SET @createTableStmtStr = CONCAT('CREATE TEMPORARY TABLE `', newTableName,'` ( id INT UNSIGNED PRIMARY KEY)' );
+    
+    PREPARE createTableStmt FROM @createTableStmtStr; 
+    EXECUTE createTableStmt;
+    DEALLOCATE PREPARE createTableStmt;
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `createTmpTable2` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `createTmpTable2`(out newTableName VARCHAR(255))
+BEGIN
+    call registerTempTableName(newTableName);
+	SET @createTableStmtStr = CONCAT('CREATE TEMPORARY TABLE `', newTableName,'` ( id INT UNSIGNED, id2 INT UNSIGNED, domain INT UNSIGNED, CONSTRAINT `', newTableName,'PK` PRIMARY KEY (id,id2,domain) )' );
+    
+    PREPARE createTableStmt FROM @createTableStmtStr; 
+    EXECUTE createTableStmt;
+    DEALLOCATE PREPARE createTableStmt;
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `deleteEntity` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `deleteEntity`(in EntityID INT UNSIGNED)
+BEGIN
+
+
+	DELETE FROM files where file_id=EntityID;
+	DELETE FROM data_type WHERE domain_id=0 and (entity_id=0 and property_id=EntityID) or entity_id=EntityID; 
+	DELETE FROM collection_type WHERE domain_id=0 and (entity_id=0 and property_id=EntityID) or entity_id=EntityID; 
+	DELETE FROM entities where id=EntityID;
+	DELETE FROM entity_acl WHERE NOT EXISTS (SELECT 1 FROM entities WHERE entities.acl = entity_acl.id LIMIT 1);
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `deleteEntityProperties` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `deleteEntityProperties`(in EntityID INT UNSIGNED)
+BEGIN
+
+CALL deleteIsa(EntityID);
+
+DELETE FROM reference_data 
+where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
+DELETE FROM null_data 
+where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
+DELETE FROM text_data 
+where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
+DELETE FROM name_data 
+where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
+DELETE FROM enum_data 
+where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
+DELETE FROM integer_data 
+where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
+DELETE FROM double_data 
+where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
+DELETE FROM datetime_data 
+where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
+DELETE FROM date_data 
+where (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
+DELETE FROM name_overrides
+WHERE (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
+DELETE FROM desc_overrides
+WHERE (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID;
+DELETE FROM data_type 
+WHERE (domain_id=0 AND entity_id=EntityID) OR domain_id=EntityID OR (domain_id=0 AND entity_id=0 AND property_id=EntityID);
+
+DELETE FROM query_template_def WHERE id=EntityID;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `deleteIsa` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `deleteIsa`(IN EntityID INT UNSIGNED)
+BEGIN
+	
+	DELETE FROM isa_cache WHERE child=EntityID or rpath=EntityID or rpath LIKE concat('%>',EntityID) or rpath LIKE concat('%>', EntityID, '>%');
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `deleteLinCon` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `deleteLinCon`(in sig BIGINT)
+BEGIN
+
+    DELETE FROM units_lin_con WHERE signature_from=sig;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `entityACL` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `entityACL`(out ACLID INT UNSIGNED, in ACLSTR VARBINARY(65525))
+BEGIN
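+   -- Get-or-create: return the id of the given ACL string, inserting a new row if none exists yet.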
+   SELECT id INTO ACLID FROM entity_acl as t WHERE t.acl=ACLSTR LIMIT 1;
+   IF ACLID IS NULL THEN
+		INSERT INTO entity_acl (acl) VALUES (ACLSTR);
+		SET ACLID = LAST_INSERT_ID();
+   END IF;
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `finishNegationFilter` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `finishNegationFilter`(in sourceSet VARCHAR(255), in targetSet VARCHAR(255), in subResultSet VARCHAR(255))
+BEGIN
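+    -- Either negate in place (drop the sub-result from the source set) or accumulate the complement of the sub-result into the separate target set.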
+    IF targetSet IS NULL OR sourceSet = targetSet THEN
+        call calcDifference(sourceSet, subResultSet);
+    ELSE
+        call calcComplementUnion(targetSet,subResultSet,sourceSet);
+    END IF;
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `finishSubProperty` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `finishSubProperty`(in sourceSet VARCHAR(255),in targetSet VARCHAR(255), in list VARCHAR(255))
+BEGIN
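+    -- Build the filter statement via makeStmt, execute it, and return the generated SQL for inspection.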
+    DECLARE data VARCHAR(20000) DEFAULT CONCAT('`',list,'`');
+    SET @finishSubPropertyStmtStr = makeStmt(sourceSet, targetSet, data, NULL);
+
+	PREPARE finishSubPropertyStmt FROM @finishSubPropertyStmtStr;
+	EXECUTE finishSubPropertyStmt;
+    DEALLOCATE PREPARE finishSubPropertyStmt;
+        
+    SELECT @finishSubPropertyStmtStr AS finishSubPropertyStmt;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `getChildren` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `getChildren`(in tableName varchar(255))
+BEGIN
+
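+    -- Expand tableName by all transitive children of its ids, using the precomputed isa_cache closure.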
+    CREATE TEMPORARY TABLE dependTemp (id INT UNSIGNED PRIMARY KEY);
+
+    SET @initDepend = CONCAT('INSERT IGNORE INTO dependTemp (id) SELECT i.child FROM isa_cache AS i INNER JOIN `', tableName, '` AS t ON (i.parent=t.id);');
+    PREPARE initDependStmt FROM @initDepend;
+
+	EXECUTE initDependStmt;
+	IF ROW_COUNT() != 0 THEN
+    	SET @transfer = CONCAT('INSERT IGNORE INTO `', tableName, '` (id) SELECT id FROM dependTemp');
+        PREPARE transferstmt FROM @transfer;
+		EXECUTE transferstmt;
+		DEALLOCATE PREPARE transferstmt;
+	END IF;
+
+
+	DEALLOCATE PREPARE initDependStmt;
+	DROP TEMPORARY TABLE dependTemp;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `getDependentEntities` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `getDependentEntities`(in EntityID INT UNSIGNED)
+BEGIN
+
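+-- Collect the ids of all entities that refer to EntityID as a value, as a property, or as a datatype in any of the data tables.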
+DROP TEMPORARY TABLE IF EXISTS refering;		
+CREATE TEMPORARY TABLE refering (
+id INT UNSIGNED UNIQUE
+);
+
+INSERT IGNORE INTO refering (id) SELECT entity_id FROM reference_data WHERE (value=EntityID OR property_id=EntityID) AND domain_id=0 AND entity_id!=EntityID;
+INSERT IGNORE INTO refering (id) SELECT domain_id FROM reference_data WHERE (value=EntityID OR property_id=EntityID) AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
+
+INSERT IGNORE INTO refering (id) SELECT entity_id FROM text_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
+INSERT IGNORE INTO refering (id) SELECT domain_id FROM text_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
+
+INSERT IGNORE INTO refering (id) SELECT entity_id FROM enum_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
+INSERT IGNORE INTO refering (id) SELECT domain_id FROM enum_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
+
+INSERT IGNORE INTO refering (id) SELECT entity_id FROM name_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
+INSERT IGNORE INTO refering (id) SELECT domain_id FROM name_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
+
+INSERT IGNORE INTO refering (id) SELECT entity_id FROM integer_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
+INSERT IGNORE INTO refering (id) SELECT domain_id FROM integer_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
+
+INSERT IGNORE INTO refering (id) SELECT entity_id FROM double_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
+INSERT IGNORE INTO refering (id) SELECT domain_id FROM double_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
+
+INSERT IGNORE INTO refering (id) SELECT entity_id FROM datetime_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
+INSERT IGNORE INTO refering (id) SELECT domain_id FROM datetime_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
+
+INSERT IGNORE INTO refering (id) SELECT entity_id FROM date_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
+INSERT IGNORE INTO refering (id) SELECT domain_id FROM date_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
+
+INSERT IGNORE INTO refering (id) SELECT entity_id FROM null_data WHERE property_id=EntityID AND domain_id=0 AND entity_id!=EntityID;
+INSERT IGNORE INTO refering (id) SELECT domain_id FROM null_data WHERE property_id=EntityID AND domain_id!=EntityID AND entity_id!=EntityID AND domain_id!=0; 
+
+INSERT IGNORE INTO refering (id) SELECT entity_id from data_type WHERE datatype=EntityID AND domain_id=0 AND entity_id!=EntityID;
+INSERT IGNORE INTO refering (id) SELECT domain_id from data_type WHERE datatype=EntityID;
+
+
+Select id from refering WHERE id!=0 and id!=EntityID;
+
+DROP TEMPORARY TABLE refering;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `getFile` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `getFile`(in FileID INT)
+BEGIN 
+
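+-- Look up the entity; if it is a file, return its path, hash, size, and creation info from the insertion history.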
+Select name, description, role into @name, @description, @role from entities where id=FileID LIMIT 1;
+
+IF @role = 'file' Then
+		Select path, hash, size into @FilePath, @FileHash, @FileSize from files where file_id=FileID LIMIT 1;
+		Select timestamp, user_id, user_agent into @FileCreated, @FileCreator, @FileGenerator from history where entity_id=FileID AND event='insertion' LIMIT 1;
+
+Select 
+FileID as FileID,
+@FilePath as FilePath,
+@FileSize as FileSize,
+@FileHash as FileHash,
+@description as FileDescription,
+@FileCreated as FileCreated,
+@FileCreator as FileCreator,
+@FileGenerator as FileGenerator,
+NULL	as FileOwner,
+NULL as FilePermission,
+NULL as FileChecksum;
+
+END IF;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `getFileIdByPath` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `getFileIdByPath`(in FilePath VARCHAR(255))
+BEGIN 
+
+Select file_id as FileID from files where path=FilePath LIMIT 1;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `getRole` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `getRole`(in RoleName VARCHAR(255))
+BEGIN
+
+Select e.id INTO @RoleID from entities e where e.name=RoleName AND e.role=RoleName LIMIT 1;
+
+call retrieveEntity(@RoleID);
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `getRules` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `getRules`(in DomainID INT UNSIGNED, in EntityID INT UNSIGNED, in TransType VARCHAR(255))
+BEGIN
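+-- A NULL DomainID or EntityID falls back to matching domain/entity 0; a NULL TransType matches every transaction type.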
+
+SELECT rules.transaction, rules.criterion, rules.modus from rules where if(DomainID is null, rules.domain_id=0,rules.domain_id=DomainID) AND if(EntityID is null, rules.entity_id=0,rules.entity_id=EntityID) AND if(TransType is null,true=true,rules.transaction=TransType);
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `initAutoIncrement` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `initAutoIncrement`()
+BEGIN
+
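+    -- Move the AUTO_INCREMENT counter past the highest entity id ever logged, so ids of deleted entities are not reused.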
+    SELECT @max := MAX(entity_id)+ 1 FROM transaction_log; 
+    IF @max IS NOT NULL THEN
+        SET @stmtStr = CONCAT('ALTER TABLE entities AUTO_INCREMENT=',@max);
+        PREPARE stmt FROM @stmtStr;
+        EXECUTE stmt;
+        DEALLOCATE PREPARE stmt;
+    END IF;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `initBackReference` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `initBackReference`(in pid INT UNSIGNED, in pname VARCHAR(255), in entity_id INT UNSIGNED, in ename VARCHAR(255))
+BEGIN
+	DECLARE propertiesTable VARCHAR(255) DEFAULT NULL;
+	DECLARE entitiesTable VARCHAR(255) DEFAULT NULL;
+
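+    -- Resolve the given property and entity names into temporary tables holding all matching ids, including subtypes.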
+    IF pname IS NOT NULL THEN
+        call createTmpTable(propertiesTable);
+        call initSubEntity(pid, pname, propertiesTable);
+    END IF;
+
+    IF ename IS NOT NULL THEN
+        call createTmpTable(entitiesTable);
+        call initSubEntity(entity_id, ename, entitiesTable);
+    END IF;
+
+	SELECT propertiesTable, entitiesTable;
+
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `initConjunctionFilter` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `initConjunctionFilter`(in sourceSet VARCHAR(255))
+BEGIN
+    DECLARE newTableName VARCHAR(255) DEFAULT NULL;
+    call createTmpTable(newTableName);
+    call copyTable(sourceSet, newTableName);
+    SELECT newTableName AS newTableName;
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `initDisjunctionFilter` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `initDisjunctionFilter`()
+BEGIN
+    call initEmptyTargetSet(NULL);
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `initEmptyTargetSet` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `initEmptyTargetSet`(in targetSet VARCHAR(255))
+BEGIN
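+    -- Reuse targetSet only while it is still empty; otherwise, or when no target set is given, create a fresh temporary table.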
+    DECLARE newTableName VARCHAR(255) DEFAULT targetSet;
+    IF targetSet IS NOT NULL THEN
+        SET @isNotEmptyVar = NULL; 
+        SET @isEmptyStmtStr = CONCAT("SELECT 1 INTO @isNotEmptyVar FROM `",targetSet,"` LIMIT 1");
+        PREPARE stmtIsNotEmpty FROM @isEmptyStmtStr;
+        EXECUTE stmtIsNotEmpty;
+        DEALLOCATE PREPARE stmtIsNotEmpty;
+        IF @isNotEmptyVar IS NOT NULL THEN 
+            call createTmpTable(newTableName);
+        END IF;
+    ELSE
+        call createTmpTable(newTableName);
+    END IF;
+    SELECT newTableName AS newTableName;
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `initEntity` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `initEntity`(in eid INT UNSIGNED, in ename VARCHAR(255), in enameLike VARCHAR(255), in enameRegexp VARCHAR(255), in resultset VARCHAR(255))
+initEntityLabel: BEGIN
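+	-- Fill resultset with all entities matching by exact name, LIKE pattern, REGEXP, or id, then expand the set to all children.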
+	SET @initEntityStmtStr = NULL;
+
+	IF ename IS NOT NULL THEN
+		SET @initEntityStmtStr = CONCAT('INSERT IGNORE INTO `',resultset,'` (id) SELECT id FROM entities WHERE name=? and id>=100 UNION ALL SELECT entity_id FROM name_data WHERE value=?;');
+		SET @query_param = ename;
+	ELSEIF enameLike IS NOT NULL THEN
+		SET @initEntityStmtStr = CONCAT('INSERT IGNORE INTO `',resultset,'` (id) SELECT id FROM entities WHERE name LIKE ? and id>=100 UNION ALL SELECT entity_id FROM name_data WHERE value LIKE ?;');
+		SET @query_param = enameLike;
+	ELSEIF enameRegexp IS NOT NULL THEN 
+		SET @initEntityStmtStr = CONCAT('INSERT IGNORE INTO `',resultset,'` (id) SELECT id FROM entities WHERE name REGEXP ? and id>=100 UNION ALL SELECT entity_id FROM name_data WHERE value REGEXP ?;');
+		SET @query_param = enameRegexp;
+    END IF;
+
+	IF @initEntityStmtStr IS NOT NULL THEN
+		PREPARE initEntityStmt FROM @initEntityStmtStr;
+		EXECUTE initEntityStmt USING @query_param, @query_param;
+		DEALLOCATE PREPARE initEntityStmt;
+    END IF;
+	
+    IF eid IS NOT NULL THEN
+		SET @initEntityStmtStr = CONCAT('INSERT IGNORE INTO `',resultset,'` (id) SELECT id FROM entities WHERE id=',eid,';');
+		PREPARE initEntityStmt FROM @initEntityStmtStr;
+		EXECUTE initEntityStmt;
+		DEALLOCATE PREPARE initEntityStmt;
+    END IF;
+
+	
+	IF @initEntityStmtStr IS NOT NULL THEN
+    	call getChildren(resultset);
+	END IF;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `initNegationFilter` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `initNegationFilter`(in sourceSet VARCHAR(255))
+BEGIN
+    DECLARE newTableName VARCHAR(255) DEFAULT NULL;
+    call createTmpTable(newTableName);
+    call copyTable(sourceSet, newTableName);
+    SELECT newTableName AS newTableName;
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `initPOVPropertiesTable` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `initPOVPropertiesTable`(in pid INT UNSIGNED, in pname VARCHAR(255), in sourceSet VARCHAR(255))
+BEGIN
+    DECLARE propertiesTable VARCHAR(255) DEFAULT NULL; 
+    DECLARE replTbl VARCHAR(255) DEFAULT NULL;
+   	DECLARE ecount INT DEFAULT 0;
+    DECLARE t1 BIGINT DEFAULT 0;
+    DECLARE t2 BIGINT DEFAULT 0;
+    DECLARE t3 BIGINT DEFAULT 0;
+    DECLARE t4 BIGINT DEFAULT 0;
+    DECLARE t5 BIGINT DEFAULT 0;
+    DECLARE t6 BIGINT DEFAULT 0;
+
+
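+    -- t1..t6 capture millisecond timestamps (extracted from MySQL's UUIDv1-based uuid()) for benchmarking the steps below.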
+    IF pname is NOT NULL THEN 
+        SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t1 from (select uuid() uid) as alias;
+        call createTmpTable2(propertiesTable);
+        
+        
+        SET @initPOVPropertiesTableStmt1 = CONCAT('INSERT IGNORE INTO `', propertiesTable, '` (id, id2, domain) SELECT id, 0, 0 FROM entities WHERE name = ? UNION ALL SELECT property_id, entity_id, domain_id from name_overrides WHERE name = ? UNION ALL SELECT entity_id, domain_id, 0 FROM name_data WHERE value = ?;');
+        PREPARE stmt FROM @initPOVPropertiesTableStmt1;
+        SET @pname = pname;
+        EXECUTE stmt USING @pname, @pname, @pname;
+        SET ecount = ROW_COUNT();
+
+        
+        SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t2 from (select uuid() uid) as alias;
+        IF pid IS NOT NULL THEN
+            SET @initPOVPropertiesTableStmt2 = CONCAT('INSERT IGNORE INTO `', propertiesTable, '` (id, id2, domain) VALUES (?, 0, 0)');
+            PREPARE stmt FROM @initPOVPropertiesTableStmt2;
+            SET @pid = pid;
+            EXECUTE stmt USING @pid;
+            SET ecount = ecount + ROW_COUNT();
+        END IF;
+
+        
+        SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t3 from (select uuid() uid) as alias;
+        IF ecount > 0 THEN
+            call getChildren(propertiesTable);
+        END IF;
+        
+        
+        SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t4 from (select uuid() uid) as alias;
+        IF ecount > 0 THEN
+            call createTmpTable2(replTbl);
+            SET @replTblStmt1 := CONCAT('INSERT IGNORE INTO `',replTbl, '` (id, id2, domain) SELECT r.value as id, r.entity_id as id2, 0 as domain_id FROM reference_data AS r WHERE status="REPLACEMENT" AND domain_id=0 AND EXISTS (SELECT * FROM `', sourceSet, '` AS s WHERE s.id=r.entity_id) AND EXISTS (SELECT * FROM `', propertiesTable, '` AS p WHERE p.domain = 0 AND p.id2=0 AND p.id=r.property_id);');
+            PREPARE replStmt1 FROM @replTblStmt1;
+            EXECUTE replStmt1;
+            DEALLOCATE PREPARE replStmt1;
+            SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t5 from (select uuid() uid) as alias;
+
+            SET @replTblStmt2 := CONCAT('INSERT IGNORE INTO `', propertiesTable, '` SELECT id, id2, domain FROM `', replTbl, '`;');
+            PREPARE replStmt2 FROM @replTblStmt2;
+            EXECUTE replStmt2;
+            DEALLOCATE PREPARE replStmt2;
+            SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t6 from (select uuid() uid) as alias;
+        END IF;
+    END IF;
+    SELECT propertiesTable, t1, t2, t3, t4, t5, t6, @initPOVPropertiesTableStmt1 as initPOVPropertiesTableStmt1, @initPOVPropertiesTableStmt2 as initPOVPropertiesTableStmt2, @replTblStmt1 as replTblStmt1, @replTblStmt2 as replTblStmt2;
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `initPOVRefidsTable` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `initPOVRefidsTable`(in vInt INT UNSIGNED, in vText VARCHAR(255))
+BEGIN
+    DECLARE refIdsTable VARCHAR(255) DEFAULT NULL; 
+	
+    
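+    -- Resolve the referenced entity (by name and/or id, including subtypes) into a temp table of ids; stays NULL without a text value.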
+    IF vText IS NOT NULL THEN
+        call createTmpTable(refIdsTable);
+        call initSubEntity(vInt, vText, refIdsTable);
+        
+    END IF;
+    SELECT refIdsTable;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `initQuery` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `initQuery`()
+BEGIN
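+    -- Set up per-query session state: a warnings table and a fresh result-set table whose name is returned.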
+    CREATE TEMPORARY TABLE IF NOT EXISTS warnings (warning TEXT NOT NULL);
+	
+	call createTmpTable(@resultSet);
+    SELECT @resultSet as tablename;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `initSubEntity` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `initSubEntity`(in e_id INT UNSIGNED, in ename VARCHAR(255), in tableName VARCHAR(255))
+BEGIN
+	DECLARE ecount INT DEFAULT 0;
+   
+    SET @stmtStr = CONCAT('INSERT IGNORE INTO `', tableName, '` (id) SELECT id FROM entities WHERE name = ? UNION ALL SELECT entity_id FROM name_data WHERE value=? AND domain_id=0;');
+    PREPARE stmt FROM @stmtStr;
+	SET @ename = ename;
+    EXECUTE stmt USING @ename, @ename;
+    SET ecount = ROW_COUNT();
+	DEALLOCATE PREPARE stmt;
+
+    IF e_id IS NOT NULL THEN
+        SET @stmtStr = CONCAT('INSERT IGNORE INTO `', tableName, '` (id) VALUES (', e_id, ')');
+        PREPARE stmt FROM @stmtStr;
+        EXECUTE stmt;
+        SET ecount = ecount + ROW_COUNT();
+		DEALLOCATE PREPARE stmt;
+    END IF;
+
+    IF ecount > 0 THEN
+        call getChildren(tableName);
+    END IF;
+    
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `initSubProperty` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `initSubProperty`(in sourceSet VARCHAR(255), in propertiesTable VARCHAR(255), in refIdsTable VARCHAR(255))
+BEGIN
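+-- Build a temp table of (entity_id, id, domain) reference triples for the entities in sourceSet, honouring REPLACEMENT rows and the optional property and reference-id filters.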
+DECLARE newTableName VARCHAR(255) DEFAULT NULL;
+    call registerTempTableName(newTableName);	
+    
+    SET @createSubPropertyListTableStr = CONCAT('CREATE TEMPORARY TABLE `', newTableName,'` ( entity_id INT UNSIGNED NOT NULL, id INT UNSIGNED NOT NULL, domain INT UNSIGNED NOT NULL, CONSTRAINT `',newTableName,'PK` PRIMARY KEY (entity_id, id, domain)) ' );
+    
+    PREPARE createSubPropertyListTable FROM @createSubPropertyListTableStr; 
+    EXECUTE createSubPropertyListTable;
+    DEALLOCATE PREPARE createSubPropertyListTable;
+
+	SET @subResultSetStmtStr = CONCAT('INSERT IGNORE INTO `', newTableName, '` (domain, entity_id, id) 
+            SELECT data1.domain_id as domain, data1.entity_id as entity_id, data1.value as id 
+                FROM reference_data as data1 JOIN reference_data as data2 
+                    ON (data1.domain_id=0 
+                        AND data1.domain_id=data2.domain_id 
+                        AND data2.entity_id=data1.entity_id 
+                        AND (
+                            (data1.property_id=data2.value AND data2.status="REPLACEMENT")
+                            OR
+                            (data1.property_id!=data2.value AND data2.status!="REPLACEMENT" AND data1.status!="REPLACEMENT" AND data1.property_id=data2.property_id)
+                        )
+                        AND EXISTS (SELECT 1 FROM `', sourceSet, '` as source WHERE source.id=data1.entity_id LIMIT 1)',
+                        IF(propertiesTable IS NULL, '', CONCAT(' AND EXISTS (SELECT 1 FROM `', propertiesTable, '` as props WHERE props.id=data2.property_id LIMIT 1)')),
+                        IF(refIdsTable IS NULL, '', CONCAT(' AND EXISTS (SELECT 1 FROM `', refIdsTable, '` as refs WHERE refs.id=data1.value LIMIT 1)')),	
+		')'
+        );
+
+
+	PREPARE subResultSetStmt FROM @subResultSetStmtStr;
+	EXECUTE subResultSetStmt;
+    DEALLOCATE PREPARE subResultSetStmt;
+
+	SELECT newTableName as list;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `insertEntity` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `insertEntity`(in EntityName VARCHAR(255), in EntityDesc TEXT, in EntityRole VARCHAR(255), in ACL VARBINARY(65525))
+BEGIN
+    DECLARE NewEntityID INT UNSIGNED DEFAULT NULL;
+    DECLARE NewACLID INT UNSIGNED DEFAULT NULL;
+
+    call entityACL(NewACLID, ACL);
+
+    INSERT INTO entities (name, description, role, acl) VALUES (EntityName, EntityDesc, EntityRole, NewACLID);
+    SET NewEntityID = LAST_INSERT_ID();
+
+    Select NewEntityID as EntityID;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `insertEntityProperty` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `insertEntityProperty`(in DomainID INT UNSIGNED, in EntityID INT UNSIGNED, in PropertyID INT UNSIGNED, in Datatable VARCHAR(255), in PropertyValue TEXT, in PropertyUnitSig BIGINT, in PropertyStatus VARCHAR(255), in NameOverride VARCHAR(255), in DescOverride TEXT, in datatypeOverride INT UNSIGNED, in Collection VARCHAR(255), in PropertyIndex INT UNSIGNED)
+BEGIN
+
+	CASE Datatable
+	WHEN 'double_data' THEN
+		INSERT INTO double_data 
+		(domain_id, entity_id, property_id, value, unit_sig, status, pidx) 
+		VALUES 
+		(DomainID, EntityID, PropertyID, PropertyValue, PropertyUnitSig, PropertyStatus, PropertyIndex);
+	WHEN 'integer_data' THEN
+		INSERT INTO integer_data 
+		(domain_id, entity_id, property_id, value, unit_sig, status, pidx) 
+		VALUES 
+		(DomainID, EntityID, PropertyID, PropertyValue, PropertyUnitSig, PropertyStatus, PropertyIndex);
+	WHEN 'datetime_data' THEN
+		INSERT INTO datetime_data 
+		(domain_id, entity_id, property_id, value, value_ns, status, pidx) 
+		VALUES 
+		(DomainID, EntityID, PropertyID, SUBSTRING_INDEX(PropertyValue, 'UTC', 1), IF(SUBSTRING_INDEX(PropertyValue, 'UTC', -1)='',NULL,SUBSTRING_INDEX(PropertyValue, 'UTC', -1)), PropertyStatus, PropertyIndex);
+	WHEN 'reference_data' THEN
+		INSERT INTO reference_data 
+		(domain_id, entity_id, property_id, value, status, pidx) 
+		VALUES 
+		(DomainID, EntityID, PropertyID, PropertyValue, PropertyStatus, PropertyIndex);
+	WHEN 'enum_data' THEN	
+		INSERT INTO enum_data 
+		(domain_id, entity_id, property_id, value, status, pidx) 
+		VALUES 
+		(DomainID, EntityID, PropertyID, PropertyValue, PropertyStatus, PropertyIndex);
+	WHEN 'date_data' THEN	
+		INSERT INTO date_data 
+		(domain_id, entity_id, property_id, value, status, pidx) 
+		VALUES 
+		(DomainID, EntityID, PropertyID, SUBSTRING_INDEX(PropertyValue, '.', 1), PropertyStatus, PropertyIndex);
+	WHEN 'text_data' THEN
+		INSERT INTO text_data 
+		(domain_id, entity_id, property_id, value, status, pidx) 
+		VALUES 
+		(DomainID, EntityID, PropertyID, PropertyValue, PropertyStatus, PropertyIndex);
+	WHEN 'null_data' THEN
+		INSERT INTO null_data
+		(domain_id, entity_id, property_id, status, pidx) 
+		VALUES 
+		(DomainID, EntityID, PropertyID, PropertyStatus, PropertyIndex);
+	WHEN 'name_data' THEN
+		INSERT INTO name_data
+		(domain_id, entity_id, property_id, value, status, pidx) 
+		VALUES 
+		(DomainID, EntityID, PropertyID, PropertyValue, PropertyStatus, PropertyIndex);
+
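+	-- Unknown Datatable: deliberately select from a non-existent table to abort with an error.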
+	ELSE	
+		SELECT * FROM table_does_not_exist;
+	END CASE;
+
+	IF DatatypeOverride IS NOT NULL THEN
+		call overrideType(DomainID, EntityID, PropertyID, DatatypeOverride);
+		IF Collection IS NOT NULL THEN
+			INSERT INTO collection_type (domain_id, entity_id, property_id, collection) VALUES (DomainID, EntityID, PropertyID, Collection);
+		END IF;
+	END IF;
+
+	IF NameOverride IS NOT NULL THEN
+		call overrideName(DomainID, EntityID, PropertyID, NameOverride);	
+	END IF;
+
+	IF DescOverride IS NOT NULL THEN
+		call overrideDesc(DomainID, EntityID, PropertyID, DescOverride);
+	END IF;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `insertIsa` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `insertIsa`(IN c INT UNSIGNED, IN p INT UNSIGNED)
+BEGIN
+
+    INSERT INTO isa_cache (child, parent, rpath) VALUES (c,p,c);
+
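+    -- Propagate upward: every ancestor of the new parent also becomes an ancestor of the child.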
+    INSERT IGNORE INTO isa_cache SELECT c AS child, i.parent AS parent, IF(p=i.rpath or i.rpath=parent, p, concat(p, ">", i.rpath)) AS rpath FROM isa_cache AS i WHERE i.child = p;
+
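+    -- Propagate downward: existing descendants of the child inherit the newly added ancestors.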
+    INSERT IGNORE INTO isa_cache SELECT l.child, r.parent, if(l.rpath=l.child and r.rpath=c, c, concat(if(l.rpath=l.child,c,concat(l.rpath, '>', c)), if(r.rpath=c,'',concat('>', r.rpath)))) AS rpath FROM isa_cache as l INNER JOIN isa_cache as r ON (l.parent = r.child AND l.parent=c);
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `insertLinCon` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `insertLinCon`(in signature_from BIGINT, in signature_to BIGINT, in a DECIMAL(65,30), in b_dividend BIGINT, in b_divisor BIGINT, in c DECIMAL(65,30))
+BEGIN
+
+    INSERT IGNORE INTO units_lin_con (signature_from, signature_to, a, b_dividend, b_divisor, c) VALUES (signature_from, signature_to, a, b_dividend, b_divisor, c);
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `insertUser` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `insertUser`(in Name VARCHAR(255), in Password VARCHAR(255))
+BEGIN 
+
+
+INSERT INTO entities (name, role, acl) VALUES (Name, 'USER', 0);
+
+SET @LAST_UserID = LAST_INSERT_ID();
+
+INSERT INTO passwords VALUES (@LAST_UserID, Password);
+
+Select @LAST_UserID as UserID; 
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `intersectTable` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `intersectTable`(in resultSetTable VARCHAR(255), in diff VARCHAR(255))
+BEGIN
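+    -- Intersect: delete from resultSetTable every id that is not also present in diff.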
+    SET @diffStmtStr = CONCAT('DELETE FROM `', resultSetTable, '` WHERE id NOT IN ( SELECT id FROM `', diff,'`)');
+    PREPARE diffStmt FROM @diffStmtStr;
+    EXECUTE diffStmt;
+    DEALLOCATE PREPARE diffStmt;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `isSubtype` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `isSubtype`(in c INT UNSIGNED, in p INT UNSIGNED)
+BEGIN
+	DECLARE ret BOOLEAN DEFAULT FALSE;
+	SELECT TRUE INTO ret FROM isa_cache AS i WHERE i.child=c AND i.parent=p LIMIT 1;
+    SELECT ret as ISA;
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `overrideDesc` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `overrideDesc`(in DomainID INT UNSIGNED, in EntityID INT UNSIGNED, in PropertyID INT UNSIGNED, in Description TEXT)
+BEGIN
+	INSERT INTO desc_overrides (domain_id, entity_id, property_id, description) VALUES (DomainID, EntityID, PropertyID, Description);
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `overrideName` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `overrideName`(in DomainID INT UNSIGNED, in EntityID INT UNSIGNED, in PropertyID INT UNSIGNED, in Name VARCHAR(255))
+BEGIN
+	INSERT INTO name_overrides (domain_id, entity_id, property_id, name) VALUES (DomainID, EntityID, PropertyID, Name);
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `overrideType` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `overrideType`(in DomainID INT UNSIGNED, in EntityID INT UNSIGNED, in PropertyID INT UNSIGNED, in Datatype INT UNSIGNED)
+BEGIN
+	INSERT INTO data_type (domain_id, entity_id, property_id, datatype) VALUES (DomainID, EntityID, PropertyID, Datatype);
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `raiseWarning` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `raiseWarning`(in str VARCHAR(20000))
+BEGIN
+    INSERT INTO warnings VALUES (str);
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `registerSubdomain` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `registerSubdomain`(in Count INT UNSIGNED)
+BEGIN
+    DECLARE ED INTEGER DEFAULT NULL;
+
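+-- Ensure that at least Count multipurpose DOMAIN entities exist, then return the ids of all of them.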
+Select COUNT(id) INTO ED FROM entities WHERE Role='DOMAIN' AND id!=0;
+WHILE ED < Count DO
+		INSERT INTO entities (name, description, role, acl) VALUES (NULL, "Multipurpose subdomain", 'DOMAIN', 0);
+		SET ED = ED + 1;
+END WHILE;
+
+SELECT id as DomainID FROM entities WHERE Role='DOMAIN' and id!=0;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `registerTempTableName` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `registerTempTableName`(out newTableName VARCHAR(255))
+BEGIN
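+    -- Generate a pseudo-random table name and append it to the session-wide @tempTableList of registered temporary tables.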
+    SET newTableName = md5(CONCAT(RAND(),CURRENT_TIMESTAMP()));
+    SET @tempTableList = IF(@tempTableList IS NULL, CONCAT('`',newTableName,'`'), CONCAT(@tempTableList, ',`', newTableName, '`'));
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `retrieveDatatype` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `retrieveDatatype`(in DatatypeName VARCHAR(255))
+BEGIN
+
+Select e.id INTO @DatatypeID from entities e where e.name=DatatypeName AND e.role='DATATYPE' LIMIT 1;
+
+call retrieveEntity(@DatatypeID);
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `retrieveEntity` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `retrieveEntity`(in EntityID INT UNSIGNED)
+BEGIN
+	DECLARE FilePath VARCHAR(255) DEFAULT NULL;
+	DECLARE FileSize VARCHAR(255) DEFAULT NULL;
+	DECLARE FileHash VARCHAR(255) DEFAULT NULL;
+	DECLARE DatatypeID INT UNSIGNED DEFAULT NULL;
+    DECLARE CollectionName VARCHAR(255) DEFAULT NULL;
+	
+	Select path, size, hex(hash) into FilePath, FileSize, FileHash from files where file_id = EntityID LIMIT 1;
+	Select datatype into DatatypeID from data_type where domain_id=0 and entity_id=0 and property_id=EntityID LIMIT 1;
+
+	SELECT collection into CollectionName from collection_type where domain_id=0 and entity_id=0 and property_id=EntityID LIMIT 1;
+
+	Select 
+		(Select name from entities where id=DatatypeID) as Datatype, 
+		CollectionName as Collection,
+		EntityID as EntityID, 
+		e.name as EntityName, 
+		e.description as EntityDesc, 
+		e.role as EntityRole, 
+		FileSize as FileSize, 
+		FilePath as FilePath, 
+		FileHash as FileHash,
+		(SELECT acl FROM entity_acl as a WHERE a.id = e.acl) as ACL
+	from entities e where id = EntityID LIMIT 1;
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `retrieveEntityParents` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `retrieveEntityParents`(in EntityID INT UNSIGNED)
+BEGIN
+
+SELECT parent AS ParentID, name AS ParentName, description AS ParentDescription, role AS ParentRole, (SELECT acl from entity_acl as a WHERE a.id=e.acl) AS ACL FROM isa_cache AS i JOIN entities AS e ON (i.parent=e.id AND i.child=EntityID and i.rpath=EntityID); 
+
+
+
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `retrieveEntityProperties` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `retrieveEntityProperties`(in DomainID INT UNSIGNED, in EntityID INT UNSIGNED)
+BEGIN
+
+		
+		Select 
+		property_id as PropertyID, value as PropertyValue, status as PropertyStatus, pidx as PropertyIndex from double_data where domain_id = DomainID and entity_id = EntityID
+
+		UNION ALL
+		
+		
+		Select 
+		property_id as PropertyID, value as PropertyValue, status as PropertyStatus, pidx as PropertyIndex from integer_data where domain_id = DomainID and entity_id = EntityID
+
+		UNION ALL
+
+		
+		Select 
+		property_id as PropertyID, CONCAT(value, '.NULL.NULL') as PropertyValue, status as PropertyStatus, pidx as PropertyIndex from date_data where domain_id = DomainID and entity_id = EntityID
+
+		UNION ALL
+
+		
+		Select 
+		property_id as PropertyID, CONCAT(value, 'UTC', IF(value_ns IS NULL, '', value_ns)) as PropertyValue, status as PropertyStatus, pidx as PropertyIndex from datetime_data where domain_id = DomainID and entity_id = EntityID
+
+		UNION ALL
+
+		
+		Select 
+		property_id as PropertyID, value as PropertyValue, status as PropertyStatus, pidx as PropertyIndex from text_data where domain_id = DomainID and entity_id = EntityID
+
+		UNION ALL
+
+		
+		Select 
+		property_id as PropertyID, value as PropertyValue, status as PropertyStatus, pidx as PropertyIndex from enum_data where domain_id = DomainID and entity_id = EntityID
+
+		UNION ALL
+
+		
+		Select 
+		property_id as PropertyID, value as PropertyValue, status as PropertyStatus, pidx as PropertyIndex from reference_data where domain_id = DomainID and entity_id = EntityID
+
+		UNION ALL
+
+		
+		Select
+		property_id as PropertyID, NULL AS PropertyValue, status as PropertyStatus, pidx as PropertyIndex from null_data WHERE domain_id = DomainID and entity_id = EntityID
+		
+		UNION ALL
+
+		
+		Select 
+		property_id as PropertyID, value as PropertyValue, status as PropertyStatus, pidx as PropertyIndex from name_data where domain_id = DomainID and entity_id = EntityID;
+
+
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `retrieveGroup` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `retrieveGroup`(in USERID INT UNSIGNED, in GROUPID INT UNSIGNED)
+BEGIN 
+IF USERID IS NOT NULL THEN 
+  Select user_id, group_id from groups where user_id = USERID;
+ELSEIF GROUPID IS NOT NULL THEN
+  Select user_id, group_id from groups where group_id = GROUPID;
+END IF;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `retrieveOverrides` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `retrieveOverrides`(in DomainID INT UNSIGNED, in EntityID INT UNSIGNED)
+BEGIN
+
+	SELECT NULL as collection_override, name as name_override, NULL as desc_override, NULL as type_override, entity_id, property_id from name_overrides where domain_id=DomainID and entity_id=EntityID UNION ALL
+	SELECT NULL as collection_override, NULL as name_override, description as desc_override, NULL as type_override, entity_id, property_id from desc_overrides where domain_id=DomainID and entity_id=EntityID UNION ALL
+	SELECT NULL as collection_override, NULL as name_override, NULL as desc_override, (Select name from entities where id=datatype LIMIT 1) as type_override, entity_id, property_id from data_type where domain_id=DomainID and entity_id=EntityID UNION ALL
+	SELECT collection as collection_override, NULL as name_override, NULL as desc_override, NULL as type_override, entity_id, property_id from collection_type where domain_id=DomainID and entity_id=EntityID;
+	
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `setPassword` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `setPassword`(in EntityID INT UNSIGNED, in NewPassword VARCHAR(255))
+BEGIN
+
+
+	DELETE FROM passwords where entity_id=EntityID;
+	INSERT INTO passwords (entity_id, password) VALUES (EntityID, NewPassword);
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `showEntityAutoIncr` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `showEntityAutoIncr`()
+BEGIN
+SELECT `AUTO_INCREMENT`
+FROM  INFORMATION_SCHEMA.TABLES
+WHERE TABLE_SCHEMA = 'caosdb'
+AND   TABLE_NAME   = 'entities';
+
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `updateEntity` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `updateEntity`(in EntityID INT UNSIGNED, in EntityName VARCHAR(255), in EntityDescription TEXT, in EntityRole VARCHAR(255), in Datatype VARCHAR(255), in Collection VARCHAR(255), in ACL VARBINARY(65525))
+BEGIN 
+	DECLARE ACLID INT UNSIGNED DEFAULT NULL;
+	call entityACL(ACLID, ACL);
+
+	UPDATE entities e SET e.name = EntityName, e.description = EntityDescription, e.role=EntityRole, e.acl = ACLID where e.id = EntityID;
+
+	DELETE from data_type where domain_id=0 AND entity_id=0 AND property_id=EntityID;
+    DELETE from collection_type where domain_id=0 AND entity_id=0 AND property_id=EntityID;
+
+    IF Datatype IS NOT NULL THEN
+        INSERT INTO data_type (domain_id, entity_id, property_id, datatype) SELECT 0, 0, EntityID, ( SELECT id from entities where name = Datatype LIMIT 1);
+		IF Collection IS NOT NULL THEN
+        	INSERT INTO collection_type (domain_id, entity_id, property_id, collection) SELECT 0, 0, EntityID, Collection;
+		END IF;
+    END IF;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!50003 DROP PROCEDURE IF EXISTS `updateLinCon` */;
+/*!50003 SET @saved_cs_client      = @@character_set_client */ ;
+/*!50003 SET @saved_cs_results     = @@character_set_results */ ;
+/*!50003 SET @saved_col_connection = @@collation_connection */ ;
+/*!50003 SET character_set_client  = utf8mb4 */ ;
+/*!50003 SET character_set_results = utf8mb4 */ ;
+/*!50003 SET collation_connection  = utf8mb4_general_ci */ ;
+/*!50003 SET @saved_sql_mode       = @@sql_mode */ ;
+/*!50003 SET sql_mode              = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ;
+DELIMITER ;;
+CREATE DEFINER=`caosdb`@`%` PROCEDURE `updateLinCon`(in sig_from BIGINT, in sig_to BIGINT, in new_a DECIMAL(65,30), in new_b_dividend BIGINT, in new_b_divisor BIGINT, in new_c DECIMAL(65,30))
+BEGIN
+	UPDATE units_ling_con SET signature_to=sig_to, a=new_a, b_dividend=new_b_dividend, b_divisor=new_b_divisor, c=new_c where signature_from=sig_from;
+
+END ;;
+DELIMITER ;
+/*!50003 SET sql_mode              = @saved_sql_mode */ ;
+/*!50003 SET character_set_client  = @saved_cs_client */ ;
+/*!50003 SET character_set_results = @saved_cs_results */ ;
+/*!50003 SET collation_connection  = @saved_col_connection */ ;
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
+
+/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
+/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
+/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
+/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
+/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
+/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
+/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
+
+-- Dump completed on 2019-08-17  9:30:12
diff --git a/integrationtests/test-profile/profile.yml b/integrationtests/test-profile/profile.yml
index f830a2fbe6c6a4ae35362676db310f3eadf6f4cc..a0a18a2f30bb1c91e6d9d0a0677c38c4ae55857e 100644
--- a/integrationtests/test-profile/profile.yml
+++ b/integrationtests/test-profile/profile.yml
@@ -136,7 +136,7 @@ default:
         # grpc_server_port_https: 8443
         # HTTP port of the grpc end-point
         # grpc_server_port_http: 8080
-
+        _CAOSDB_INTEGRATION_TEST_SUITE_KEY: 10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2
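+        # the key above must match the one passed to set_test_key() in the
+        # integration tests (see e.g. integrationtests/test_issues.py)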
   # Development configuration options
   # devel:
     # Copy the caosdb-server jar from this location into the Docker container.
diff --git a/integrationtests/test_data/extroot/realworld_example/data/35/03_raw_data/001_dataset1/metadata.json b/integrationtests/test_data/extroot/realworld_example/data/35/03_raw_data/001_dataset1/metadata.json
index 9b81cc094bf7d1c35154d8f092a96d5f5fae35c9..e8b6fd4425872be128ae55d038d77bd1ff7bf4af 100644
--- a/integrationtests/test_data/extroot/realworld_example/data/35/03_raw_data/001_dataset1/metadata.json
+++ b/integrationtests/test_data/extroot/realworld_example/data/35/03_raw_data/001_dataset1/metadata.json
@@ -5,7 +5,7 @@
             {
                 "longitude": 18.445078548041533,
                 "start_datetime": "2022-02-10T16:36:48+01:00",
-                "latitude": 53.10833068997861,
+                "latitude": 53,
                 "elevation": 2,
                 "location": "Bremen, Germany"
             }
diff --git a/integrationtests/test_data/extroot/realworld_example/dataset_cfoods.yml b/integrationtests/test_data/extroot/realworld_example/dataset_cfoods.yml
index eaf2690ae130cb61c8a74452e3e4e1d4fd06846a..69cb53d4ffb86a3353fbccc2cae3dc3fbea25009 100644
--- a/integrationtests/test_data/extroot/realworld_example/dataset_cfoods.yml
+++ b/integrationtests/test_data/extroot/realworld_example/dataset_cfoods.yml
@@ -33,28 +33,28 @@ Data:
           validate: schema/dataspace.schema.json
           subtree:
             dataspace_id_element:
-              type: DictIntegerElement
+              type: IntegerElement
               match_name: "dataspace_id"
               match_value: "(?P<id>[0-9]+)"
               records:
                 Dataspace:
                   dataspace_id: $id
             archived_element:
-              type: DictBooleanElement
+              type: BooleanElement
               match_name: "archived"
               match_value: "(?P<archived>.*)"
               records:
                 Dataspace:
                   archived: $archived
             url_element:
-              type: DictTextElement
+              type: TextElement
               match_name: "url"
               match_value: "(?P<url>.*)"
               records:
                 Dataspace:
                   url: $url
             coordinator_element:
-              type: DictDictElement
+              type: DictElement
               match_name: "coordinator"
               records:
                 Person:
@@ -64,70 +64,70 @@ Data:
                   Person: $Person
               subtree: &person_subtree
                 full_name_element:
-                  type: DictTextElement
+                  type: TextElement
                   match_name: "full_name"
                   match_value: "(?P<full_name>.*)"
                   records:
                     Person:
                       full_name: $full_name
                 full_name_nonlatin_element:
-                  type: DictTextElement
+                  type: TextElement
                   match_name: "full_name_nonlatin"
                   match_value: "(?P<full_name_nonlatin>.*)"
                   records:
                     Person:
                       full_name_nonlatin: $full_name_nonlatin
                 family_name_element:
-                  type: DictTextElement
+                  type: TextElement
                   match_name: "family_name"
                   match_value: "(?P<family_name>.*)"
                   records:
                     Person:
                       family_name: $family_name
                 given_name_element:
-                  type: DictTextElement
+                  type: TextElement
                   match_name: "given_name"
                   match_value: "(?P<given_name>.*)"
                   records:
                     Person:
                       given_name: $given_name
                 email_element:
-                  type: DictTextElement
+                  type: TextElement
                   match_name: "email"
                   match_value: "(?P<email>.*)"
                   records:
                     Person:
                       email: $email
                 affiliation_element:
-                  type: DictTextElement
+                  type: TextElement
                   match_name: "affiliation"
                   match_value: "(?P<affiliation>.*)"
                   records:
                     Person:
                       affiliation: $affiliation
                 ORCID_element:
-                  type: DictTextElement
+                  type: TextElement
                   match_name: "ORCID"
                   match_value: "(?P<ORCID>.*)"
                   records:
                     Person:
                       ORCID: $ORCID
             start_date_element:
-              type: DictTextElement
+              type: TextElement
               match_name: "start_date"
               match_value: "(?P<start_date>.*)"
               records:
                 Dataspace:
                   start_date: $start_date
             end_date_element:
-              type: DictTextElement
+              type: TextElement
               match_name: "end_date"
               match_value: "(?P<end_date>.*)"
               records:
                 Dataspace:
                   end_date: $end_date
             comment:
-              type: DictTextElement
+              type: TextElement
               match_name: "comment"
               match_value: "(?P<comment>.*)"
               records:
@@ -152,18 +152,18 @@ Data:
                   validate: schema/dataset.schema.json
                   subtree:
                     title_element:
-                      type: DictTextElement
+                      type: TextElement
                       match_name: "title"
                       match_value: "(?P<title>.*)"
                       records:
                         Dataset:
                           title: $title
                     authors_element:
-                      type: DictListElement
+                      type: ListElement
                       match_name: "authors"
                       subtree:
                         author_element:
-                          type: Dict
+                          type: DictElement
                           records:
                             Person:
                               parents:
@@ -172,21 +172,21 @@ Data:
                               authors: +$Person
                           subtree: *person_subtree
                     abstract_element:
-                      type: DictTextElement
+                      type: TextElement
                       match_name: "abstract"
                       match_value: "(?P<abstract>.*)"
                       records:
                         Dataset:
                           abstract: $abstract
                     comment_element:
-                      type: DictTextElement
+                      type: TextElement
                       match_name: "comment"
                       match_value: "(?P<comment>.*)"
                       records:
                         Dataset:
                           comment: $comment
                     license_element:
-                      type: DictTextElement
+                      type: TextElement
                       match_name: "license"
                       match_value: "(?P<license_name>.*)"
                       records:
@@ -199,31 +199,31 @@ Data:
                         Dataset:
                           license: $license
                     dataset_doi_element:
-                      type: DictTextElement
+                      type: TextElement
                       match_name: "dataset_doi"
                       match_value: "(?P<dataset_doi>.*)"
                       records:
                         Dataset:
                           dataset_doi: $dataset_doi
                     related_to_dois_element:
-                      type: DictListElement
+                      type: ListElement
                       match_name: "related_to_dois"
                       subtree:
                         related_to_doi_element:
                           type: TextElement
-                          match: "(?P<related_to_doi>).*"
+                          match_value: "(?P<related_to_doi>).*"
                           records:
                             Dataset:
                               related_to_dois: +$related_to_doi
                     Keywords_element:
-                      type: DictListElement
+                      type: ListElement
                       match_name: "Keyword"
                     Events_element:
-                      type: DictListElement
+                      type: ListElement
                       match_name: "Event"
                       subtree:
                         Event_element:
-                          type: Dict
+                          type: DictElement
                           records:
                             Event:
                               parents:
@@ -232,84 +232,84 @@ Data:
                               Event: +$Event
                           subtree:
                             label_element:
-                              type: DictTextElement
+                              type: TextElement
                               match_name: "label"
                               match_value: "(?P<label>.*)"
                               records:
                                 Event:
                                   label: $label
                             comment_element:
-                              type: DictTextElement
+                              type: TextElement
                               match_name: "comment"
                               match_value: "(?P<comment>.*)"
                               records:
                                 Event:
                                   comment: $comment
                             start_datetime_element:
-                              type: DictTextElement
+                              type: TextElement
                               match_name: start_datetime
                               match_value: "(?P<start_datetime>.*)"
                               records:
                                 Event:
                                   start_datetime: $start_datetime
                             end_datetime_element:
-                              type: DictTextElement
+                              type: TextElement
                               match_name: end_datetime
                               match_value: "(?P<end_datetime>.*)"
                               records:
                                 Event:
                                   end_datetime: $end_datetime
                             longitude_element:
-                              type: DictFloatElement
+                              type: FloatElement
                               match_name: "longitude"
                               match_value: "(?P<longitude>.*)"
                               records:
                                 Event:
                                   longitude: $longitude
                             latitude_element:
-                              type: DictFloatElement
+                              type: FloatElement
                               match_name: "latitude"
                               match_value: "(?P<latitude>.*)"
                               records:
                                 Event:
                                   latitude: $latitude
                             elevation_element:
-                              type: DictFloatElement
+                              type: FloatElement
                               match_name: "elevation"
                               match_value: "(?P<elevation>.*)"
                               records:
                                 Event:
                                   elevation: $elevation
                             location_element:
-                              type: DictTextElement
+                              type: TextElement
                               match_name: location
                               match_value: "(?P<location>.*)"
                               records:
                                 Event:
                                   location: $location
                             igsn_element:
-                              type: DictTextElement
+                              type: TextElement
                               match_name: igsn
                               match_value: "(?P<igsn>.*)"
                               records:
                                 Event:
                                   igsn: $igsn
                     events_in_data_element:
-                      type: DictBooleanElement
+                      type: BooleanElement
                       match_name: "events_in_data"
                       match_value: "(?P<events_in_data>.*)"
                       records:
                         Dataset:
                           events_in_data: $events_in_data
                     geojson_element:
-                      type: DictTextElement
+                      type: TextElement
                       match_name: "geojson"
                       match_value: "(?P<geojson>.*)"
                       records:
                         Dataset:
                           geojson: $geojson
                     project_element:
-                      type: DictDictElement
+                      type: DictElement
                       match_name: "project"
                       records:
                         Project:
@@ -319,28 +319,28 @@ Data:
                           Project: $Project
                       subtree:
                         name_element:
-                          type: DictTextElement
+                          type: TextElement
                           match_name: "name"
                           match_value: "(?P<name>.*)"
                           records:
                             Project:
                               name: $name
                         full_name_element:
-                          type: DictTextElement
+                          type: TextElement
                           match_name: "full_name"
                           match_value: "(?P<full_name>.*)"
                           records:
                             Project:
                               full_name: $full_name
                         project_id_element:
-                          type: DictTextElement
+                          type: TextElement
                           match_name: "project_id"
                           match_value: "(?P<project_id>.*)"
                           records:
                             Project:
                               project_id: $project_id
                         project_type_element:
-                          type: DictTextElement
+                          type: TextElement
                           match_name: "project_type"
                           match_value: "(?P<project_type_name>.*)"
                           records:
@@ -349,39 +349,39 @@ Data:
                             Project:
                               project_type: $project_type
                         institute_element:
-                          type: DictTextElement
+                          type: TextElement
                           match_name: "institute"
                           match_value: "(?P<institute>.*)"
                           records:
                             Project:
                               institute: $institute
                         start_date_element:
-                          type: DictTextElement
+                          type: TextElement
                           match_name: "start_date"
                           match_value: "(?P<start_date>.*)"
                           records:
                             Project:
                               start_date: $start_date
                         end_date_element:
-                          type: DictTextElement
+                          type: TextElement
                           match_name: "end_date"
                           match_value: "(?P<end_date>.*)"
                           records:
                             Project:
                               end_date: $end_date
                         url_element:
-                          type: DictTextElement
+                          type: TextElement
                           match_name: "url"
                           match_value: "(?P<url>.*)"
                           records:
                             Project:
                               url: $url
                         coordinators_element:
-                          type: DictListElement
+                          type: ListElement
                           match_name: "coordinators"
                           subtree:
                             coordinator_element:
-                              type: Dict
+                              type: DictElement
                               records:
                                 Person:
                                   parents:
@@ -390,7 +390,7 @@ Data:
                                   coordinators: +$Person
                               subtree: *person_subtree
                         campaign_element:
-                          type: DictDictElement
+                          type: DictElement
                           match_name: "campaign"
                           records:
                             Campaign:
@@ -400,39 +400,39 @@ Data:
                               Campaign: $Campaign
                           subtree:
                             label_element:
-                              type: DictTextElement
+                              type: TextElement
                               match_name: "label"
                               match_value: "(?P<label>.*)"
                               records:
                                 Campaign:
                                   label: $label
                             optional_label_element:
-                              type: DictTextElement
+                              type: TextElement
                               match_name: "optional_label"
                               match_value: "(?P<optional_label>.*)"
                               records:
                                 Campaign:
                                   optional_label: $optional_label
                             start_date_element:
-                              type: DictTextElement
+                              type: TextElement
                               match_name: "start_date"
                               match_value: "(?P<start_date>.*)"
                               records:
                                 Campaign:
                                   start_date: $start_date
                             end_date_element:
-                              type: DictTextElement
+                              type: TextElement
                               match_name: "end_date"
                               match_value: "(?P<end_date>.*)"
                               records:
                                 Campaign:
                                   end_date: $end_date
                             responsible_scientists_element:
-                              type: DictListElement
+                              type: ListElement
                               match_name: "responsible_scientists"
                               subtree:
                                 responsible_scientist_element:
-                                  type: Dict
+                                  type: DictElement
                                   records:
                                     Person:
                                       parents:
@@ -441,11 +441,11 @@ Data:
                                       responsible_scientists: +$Person
                                   subtree: *person_subtree
                         Methods_element:
-                          type: DictListElement
+                          type: ListElement
                           match_name: "Method"
                           subtree:
                             Method_element:
-                              type: Dict
+                              type: DictElement
                               records:
                                 Method:
                                   parents:
@@ -454,32 +454,32 @@ Data:
                                   Method: +$Method
                               subtree:
                                 method_name_element:
-                                  type: DictTextElement
+                                  type: TextElement
                                   match_name: "method_name"
                                   match_value: "(?P<method_name>.*)"
                                   records:
                                     Method:
                                       name: $method_name
                                 abbreviation_element:
-                                  type: DictTextElement
+                                  type: TextElement
                                   match_name: "abbreviation"
                                   match_value: "(?P<abbreviation>.*)"
                                   records:
                                     Method:
                                       abbreviation: $abbreviation
                                 url_element:
-                                  type: DictTextElement
+                                  type: TextElement
                                   match_name: "url"
                                   match_value: "(?P<url>.*)"
                                   records:
                                     Method:
                                       url: $url
                         Taxa_element:
-                          type: DictListElement
+                          type: ListElement
                           match_name: "Taxon"
                           subtree:
                             Taxon_element:
-                              type: Dict
+                              type: DictElement
                               records:
                                 Taxon:
                                   parents:
@@ -488,28 +488,28 @@ Data:
                                   Taxon: +$Taxon
                               subtree:
                                 taxon_name_element:
-                                  type: DictTextElement
+                                  type: TextElement
                                   match_name: "taxon_name"
                                   match_value: "(?P<taxon_name>.*)"
                                   records:
                                     Taxon:
                                       name: $taxon_name
                         archived_element:
-                          type: DictBooleanElement
+                          type: BooleanElement
                           match_name: "archived"
                           match_value: "(P<archived>.*)"
                           records:
                             Dataset:
                               archived: $archived
                         publication_date_element:
-                          type: DictTextElement
+                          type: TextElement
                           match_name: "publication_date"
                           match_value: "(P<publication_date>.*)"
                           records:
                             Dataset:
                               publication_date: $publication_date
                         max_files_element:
-                          type: DictIntegerElement
+                          type: IntegerElement
                           match_name: "max_files"
                           match_value: "(P<max_files>.*)"
                           records:
diff --git a/integrationtests/test_issues.py b/integrationtests/test_issues.py
new file mode 100644
index 0000000000000000000000000000000000000000..527b4c0cf67f483d5b61972a0104ff4fb673402d
--- /dev/null
+++ b/integrationtests/test_issues.py
@@ -0,0 +1,126 @@
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2022 Indiscale GmbH <info@indiscale.com>
+#               2022 Florian Spreckelsen <f.spreckelsen@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+from pytest import fixture, mark
+
+import caosdb as db
+
+from caoscrawler.crawl import Crawler
+from caoscrawler.identifiable_adapters import CaosDBIdentifiableAdapter
+from caoscrawler.structure_elements import DictElement
+
+from caosdb.utils.register_tests import clear_database, set_test_key
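+# The test key is the same value as the _CAOSDB_INTEGRATION_TEST_SUITE_KEY
+# configured for the test server (see integrationtests/test-profile/profile.yml),
+# which allows fixtures like clear_database to operate on that server.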
+set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
+
+
+def test_issue_23(clear_database):
+    """Test that an update leaves existing properties, that were not found by
+    the crawler, unchanged.
+
+    See issue https://gitlab.com/caosdb/caosdb-crawler/-/issues/23
+
+    """
+
+    # insert a simple data model and a record of type TestType with the
+    # identifying property and prop_a, but not prop_b.
+    prop_ident = db.Property(name="identifying_prop", datatype=db.TEXT)
+    prop_a = db.Property(name="prop_a", datatype=db.TEXT)
+    prop_b = db.Property(name="prop_b", datatype=db.TEXT)
+    rt = db.RecordType(name="TestType")
+    rec = db.Record(name="TestRec").add_parent(rt)
+    rec.add_property(name="identifying_prop", value="identifier")
+    rec.add_property(name="prop_a", value="something")
+    db.Container().extend([prop_ident, prop_a, prop_b, rt, rec]).insert()
+
+    # set up the crawler: the cfood defines a TestType record with
+    # identifying_prop and prop_b, but not prop_a ...
+    crawler_definition = {
+        "DictTest": {
+            "type": "DictElement",
+            "match": "(.*)",
+            "records": {
+                "TestType": {}
+            },
+            "subtree": {
+                "identifying_element": {
+                    "type": "TextElement",
+                    "match_name": "ident",
+                    "match_value": "(?P<ident_value>.*)",
+                    "records": {
+                        "TestType": {
+                            "identifying_prop": "$ident_value"
+                        }
+                    }
+                },
+                "other_element": {
+                    "type": "TextElement",
+                    "match_name": "prop_b",
+                    "match_value": "(?P<other_value>.*)",
+                    "records": {
+                        "TestType": {
+                            "prop_b": "$other_value"
+                        }
+                    }
+                }
+            }
+        }
+    }
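+    # In the cfood above, the DictElement converter matches the crawled
+    # dictionary as a whole; each TextElement in its subtree matches one key
+    # (match_name), captures its value (match_value) and fills it into the
+    # TestType record.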
+
+    # register identifiable for TestType
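+    # (only identifying_prop identifies a TestType record, so the crawled
+    # record will be matched with the record inserted above)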
+    ident = CaosDBIdentifiableAdapter()
+    ident.register_identifiable("TestType", db.RecordType().add_parent(
+        name="TestType").add_property(name="identifying_prop"))
+
+    crawler = Crawler(debug=True, identifiableAdapter=ident)
+    converter_registry = crawler.load_converters(crawler_definition)
+
+    # the dictionary to be crawled...
+    test_dict = {
+        "ident": "identifier",
+        "prop_b": "something_else"
+    }
+
+    records = crawler.start_crawling(
+        DictElement("TestDict", test_dict), crawler_definition, converter_registry)
+
+    assert len(records) == 1
+    rec_crawled = records[0]
+    assert rec_crawled.parents[0].name == "TestType"
+    assert rec_crawled.get_property("identifying_prop") is not None
+    assert rec_crawled.get_property("identifying_prop").value == "identifier"
+    assert rec_crawled.get_property("prop_b") is not None
+    assert rec_crawled.get_property("prop_b").value == "something_else"
+    # no interaction with the database yet, so the record shouldn't have a prop_a yet
+    assert rec_crawled.get_property("prop_a") is None
+
+    # synchronize with database and update the record
+    ins, ups = crawler.synchronize()
+    assert len(ins) == 0
+    assert len(ups) == 1
+
+    # retrieve and check that name and properties have been combined correctly
+    rec_retrieved = db.Record(id=rec.id).retrieve()
+    assert rec_retrieved.name == rec.name
+    assert rec_retrieved.get_property(
+        "identifying_prop").value == rec.get_property("identifying_prop").value
+    assert rec_retrieved.get_property(
+        "prop_a").value == rec.get_property("prop_a").value
+    assert rec_retrieved.get_property(
+        "identifying_prop").value == rec_crawled.get_property("identifying_prop").value
+    assert rec_retrieved.get_property(
+        "prop_b").value == rec_crawled.get_property("prop_b").value
diff --git a/integrationtests/test_realworld_example.py b/integrationtests/test_realworld_example.py
index da3fb69ce635ae69cd33cbf01de9df8ebf019661..4158ed22278ef5c871a22d45885e58fbfa84ea3b 100644
--- a/integrationtests/test_realworld_example.py
+++ b/integrationtests/test_realworld_example.py
@@ -22,25 +22,23 @@
 #
 
 """
-module description
+An integration test module that runs a test against a (close to) real-world example.
 """
+from caosdb.utils.register_tests import clear_database, set_test_key
 import json
 import os
 
 import caosdb as db
 
 from caoscrawler.crawl import Crawler, crawler_main
-from caoscrawler.converters import JSONFileConverter, DictConverter
 from caoscrawler.identifiable_adapters import CaosDBIdentifiableAdapter
-from caoscrawler.structure_elements import File, JSONFile, Directory
+from caoscrawler.structure_elements import Directory
 import pytest
 from caosadvancedtools.models.parser import parse_model_from_json_schema, parse_model_from_yaml
 
-#from caosadvancedtools.testutils import clear_database, set_test_key
 import sys
 
-# TODO is not yet merged in caosadvancedtools
-# set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
+set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
 
 
 def rfp(*pathcomponents):
@@ -83,19 +81,7 @@ def clear_database():
 
 def create_identifiable_adapter():
     ident = CaosDBIdentifiableAdapter()
-    ident.register_identifiable("license", (
-        db.RecordType()
-        .add_parent("license")
-        .add_property("name")))
-    ident.register_identifiable("project_type", (
-        db.RecordType()
-        .add_parent("project_type")
-        .add_property("name")))
-    ident.register_identifiable("Person", (
-        db.RecordType()
-        .add_parent("Person")
-        .add_property("full_name")))
-
+    ident.load_from_yaml_definition(os.path.join(DATADIR, "identifiables.yml"))
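+    # the identifiables (license, project_type, Person, ...) that were
+    # previously registered here in code are now read from a YAML definition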
     return ident
 
 
@@ -131,6 +117,7 @@ def test_dataset(clear_database, usemodel):
                             "") == 1
     assert db.execute_query(f"COUNT RECORD with id={dataset.id} AND WHICH REFERENCES Event WITH "
                             "start_datetime='2022-02-10T16:36:48+01:00'") == 1
+    assert db.execute_query(f"FIND Event WITH latitude=53", unique=True)
 
 
 def test_event_update(clear_database, usemodel):
diff --git a/integrationtests/test_use_case_simple_presentation.py b/integrationtests/test_use_case_simple_presentation.py
index 60f771cfacb6a055d8539c185e17eb75118117fa..91c523be90a4d0117a7cc54217cae0b911511957 100644
--- a/integrationtests/test_use_case_simple_presentation.py
+++ b/integrationtests/test_use_case_simple_presentation.py
@@ -22,9 +22,6 @@
 # ** end header
 #
 
-"""
-module description
-"""
 import os
 import pytest
 from subprocess import run
@@ -33,23 +30,13 @@ import caosdb as db
 from caosadvancedtools.loadFiles import loadpath
 from caosadvancedtools.models import parser as parser
 from caoscrawler.crawl import crawler_main
+from caosdb.utils.register_tests import clear_database, set_test_key
 
-# TODO: wait for release of this feature in pylib
-# from caosdb.utils.register_tests import clear_database, set_test_key
-# set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
 
+set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
 DATADIR = os.path.join(os.path.dirname(__file__), "test_data",
                        "extroot", "use_case_simple_presentation")
 
-# TODO: remove this
-@pytest.fixture
-def clear_database():
-    # TODO(fspreck): Remove once the corresponding advancedtools function can be
-    # used.
-    ents = db.execute_query("FIND ENTITY WITH ID>99")
-    if ents:
-        ents.delete()
-
 
 def test_complete_crawler(
         clear_database
diff --git a/release.sh b/release.sh
new file mode 100755
index 0000000000000000000000000000000000000000..1af097f014de6cd9eb3d3e8ba5da34aea0fe1671
--- /dev/null
+++ b/release.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
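+# Build and upload a release: remove stale build artifacts, build sdist and
+# wheel, then upload them to PyPI ("-s" lets twine sign the files with GPG).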
+rm -rf dist/ build/ .eggs/
+python setup.py sdist bdist_wheel
+python -m twine upload -s dist/*
diff --git a/setup.cfg b/setup.cfg
index 0351d56dec59ee0b33c10be1f825e5d1d04f8504..77c546f110c67e3c3a8f44cb617a7f1b187813e1 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
 name = caoscrawler
-version = 0.1
+version = 0.2.1
 author = Alexander Schlemmer
 author_email = alexander.schlemmer@ds.mpg.de
 description = A new crawler for caosdb
@@ -10,19 +10,19 @@ long_description_content_type = text/markdown
 # project_urls
 classifiers =
             Programming Language :: Python :: 3
-            License :: OSI Approved :: AGPLv3
+            License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)
             Operating System :: OS Independent
 
 [options]
 package_dir =
             = src
 packages = find:
-python_requires = >=3.6
+python_requires = >=3.8
 install_requires =
 	importlib-resources
-	caosdb
-	caosadvancedtools
-    yaml-header-tools>=0.2.1
+	caosdb > 0.10.0
+	caosadvancedtools >= 0.6.0
+    yaml-header-tools >= 0.2.1
     pyyaml
     odfpy #make optional
     pandas
diff --git a/src/caoscrawler/cfood-schema.yml b/src/caoscrawler/cfood-schema.yml
index d7b5abfd1ac6c381b50bd4ce61015f1b8602b408..5e724c83695e098ce980e1aa8e81c65ae8525e19 100644
--- a/src/caoscrawler/cfood-schema.yml
+++ b/src/caoscrawler/cfood-schema.yml
@@ -16,10 +16,15 @@ cfood:
           - YamlFileCaosDBRecord
           - MarkdownFile
           - DictListElement
+          - ListElement
           - DictDictElement
+          - DictElement
           - DictFloatElement
+          - FloatElement
           - DictIntegerElement
+          - IntegerElement
           - DictBooleanElement
+          - BooleanElement
           - Definitions
           - Dict
           - JSONFile
diff --git a/src/caoscrawler/converters.py b/src/caoscrawler/converters.py
index 4be30e1a31abacc4905b20b9c62a61978c6d8847..1fb944db76a82351be7e4cfc5f7e850120d4b51f 100644
--- a/src/caoscrawler/converters.py
+++ b/src/caoscrawler/converters.py
@@ -31,11 +31,10 @@ import json
 import warnings
 from .utils import has_parent
 from .stores import GeneralStore, RecordStore
-from .structure_elements import (StructureElement, Directory, File, Dict, JSONFile,
-                                 DictIntegerElement, DictBooleanElement,
-                                 DictFloatElement, DictDictElement,
-                                 TextElement, DictTextElement, DictElement, DictListElement)
-from typing import Dict as Dict_t, List, Optional, Tuple, Union
+from .structure_elements import (StructureElement, Directory, File, DictElement, JSONFile,
+                                 IntegerElement, BooleanElement, FloatElement,
+                                 TextElement, ListElement)
+from typing import List, Optional, Tuple, Union
 from abc import ABCMeta, abstractmethod
 from string import Template
 import yaml_header_tools
@@ -206,6 +205,10 @@ def create_records(values: GeneralStore,
             # additionally add the new record to the general store:
             values[name] = c_record
 
+            # add the "fallback" parent only for Records, not for Files:
+            if (role == "Record" and "parents" not in record):
+                c_record.add_parent(name)
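+            # (the record's key in the cfood definition then also serves as
+            # the name of its parent RecordType)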
+
         c_record = records[name]
 
         for key, value in record.items():
@@ -219,15 +222,15 @@ def create_records(values: GeneralStore,
             keys_modified.append((name, key))
             propvalue, collection_mode = handle_value(value, values)
 
-            if key in SPECIAL_PROPERTIES:
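+            # special properties are matched case-insensitively now; note that
+            # this assumes SPECIAL_PROPERTIES to contain lowercase names only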
+            if key.lower() in SPECIAL_PROPERTIES:
                 # e.g. description, name, etc.
                 # list mode does not work for them
-                if key == "path" and not propvalue.startswith(os.path.sep):
+                if key.lower() == "path" and not propvalue.startswith(os.path.sep):
                     propvalue = os.path.sep + propvalue
 
                     # Convert relative to absolute paths:
                     propvalue = os.path.normpath(propvalue)
-                setattr(c_record, key, propvalue)
+                setattr(c_record, key.lower(), propvalue)
             else:
 
                 if c_record.get_property(key) is None:
@@ -253,12 +256,6 @@ def create_records(values: GeneralStore,
                 var_replaced_parent = replace_variables(parent, values)
                 if not has_parent(c_record, var_replaced_parent):
                     c_record.add_parent(var_replaced_parent)
-        else:
-            # add the "fallback" parent only for Records, not for Files:
-            if role == "Record":
-                # if not has_parent(c_record, name):
-                if len(c_record.parents) == 0:
-                    c_record.add_parent(name)
     return keys_modified
 
 
@@ -267,14 +264,12 @@ class Converter(object, metaclass=ABCMeta):
     Converters treat StructureElements contained in the hierarchical structure.
     """
 
-    def __init__(self, definition: dict,
-                 name: str,
-                 converter_registry: dict):
+    def __init__(self, definition: dict, name: str, converter_registry: dict):
         self.definition = definition
         self.name = name
 
         # Used to store usage information for debugging:
-        self.metadata: Dict_t[str, set[str]] = {
+        self.metadata: dict[str, set[str]] = {
             "usage": set()
         }
 
@@ -287,9 +282,7 @@ class Converter(object, metaclass=ABCMeta):
                     converter_definition, converter_name, converter_registry))
 
     @staticmethod
-    def converter_factory(definition: dict,
-                          name: str,
-                          converter_registry: dict):
+    def converter_factory(definition: dict, name: str, converter_registry: dict):
         """creates a Converter instance of the appropriate class.
 
         The `type` key in the `definition` defines the Converter class which is being used.
@@ -366,26 +359,29 @@ class Converter(object, metaclass=ABCMeta):
 
         filtered_children = FILTER_FUNCTIONS[rule](to_be_filtered)
 
-        return filtered_children+unmatched_children
+        return filtered_children + unmatched_children
 
     @abstractmethod
     def typecheck(self, element: StructureElement):
+        """
+        Check whether the current structure element can be converted using
+        this converter.
+        """
         pass
 
     @abstractmethod
     def match(self, element: StructureElement) -> Optional[dict]:
+        """
+        Implement the detailed checks for matching compatibility of the
+        current structure element with this converter.
+
+        Returns ``None`` if the structure element does not match this
+        converter; otherwise a dictionary of the variables matched from the
+        structure element's information.
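+        (E.g. MarkdownFileConverter.match checks the element's name against the
+        "match" regular expression of the converter definition and returns the
+        regex's group dictionary.)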
+        """
         pass
 
 
 class DirectoryConverter(Converter):
-
-    def __init__(self, definition: dict, name: str,
-                 converter_registry: dict):
-        """
-        Initialize a new directory converter.
-        """
-        super().__init__(definition, name, converter_registry)
-
     def create_children(self, generalStore: GeneralStore,
                         element: StructureElement):
         if not isinstance(element, Directory):
@@ -405,6 +401,8 @@ class DirectoryConverter(Converter):
     def typecheck(self, element: StructureElement):
         return isinstance(element, Directory)
 
+    # TODO: basically all converters implement such a match function. Shouldn't it
+    # live in the parent class, with subclasses overriding it only where needed?
     def match(self, element: StructureElement):
         if not isinstance(element, Directory):
             raise RuntimeError("Element must be a directory.")
@@ -506,16 +504,21 @@ class YamlFileCaosDBRecord(Converter):
         return keys_modified
     
 
+class FileConverter(SimpleFileConverter):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning(
+            "This class is deprecated. Please use SimpleFileConverter."))
+        super().__init__(*args, **kwargs)
+
+
 class MarkdownFileConverter(Converter):
-    def __init__(self, definition: dict, name: str,
-                          converter_registry: dict):
-        """
-        Initialize a new directory converter.
-        """
-        super().__init__(definition, name, converter_registry)
+    """
+    reads the yaml header of markdown files (if such a header exists).
+    """
 
     def create_children(self, generalStore: GeneralStore,
                         element: StructureElement):
+        # TODO: isn't the type check sufficient?
         if not isinstance(element, File):
             raise RuntimeError("A markdown file is needed to create children.")
 
@@ -525,9 +528,9 @@ class MarkdownFileConverter(Converter):
 
         for name, entry in header.items():
             if type(entry) == list:
-                children.append(DictListElement(name, entry))
+                children.append(ListElement(name, entry))
             elif type(entry) == str:
-                children.append(DictTextElement(name, entry))
+                children.append(TextElement(name, entry))
             else:
                 raise RuntimeError(
                     "Header entry {} has incompatible type.".format(name))
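# A sketch (not part of this patch) of the children a MarkdownFileConverter now
# creates from a hypothetical yaml header, assuming the new element classes from
# caoscrawler.structure_elements: list entries become ListElements, string
# entries become TextElements, and any other type raises a RuntimeError.
from caoscrawler.structure_elements import ListElement, TextElement

header = {"title": "Run 1", "authors": ["alice", "bob"]}
children = [ListElement(k, v) if isinstance(v, list) else TextElement(k, v)
            for k, v in header.items()]
# -> [TextElement("title", "Run 1"), ListElement("authors", ["alice", "bob"])]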
@@ -537,6 +540,7 @@ class MarkdownFileConverter(Converter):
         return isinstance(element, File)
 
     def match(self, element: StructureElement):
+        # TODO: isn't the type check sufficient?
         if not isinstance(element, File):
             raise RuntimeError("Element must be a file.")
         m = re.match(self.definition["match"], element.name)
@@ -551,7 +555,7 @@ class MarkdownFileConverter(Converter):
         return m.groupdict()
 
 
-class DictConverter(Converter):
+class DictElementConverter(Converter):
     # TODO use Dict as typecheck?
     def create_children(self, generalStore: GeneralStore, element: StructureElement):
         if not self.typecheck(element):
@@ -564,17 +568,17 @@ class DictConverter(Converter):
 
         for name, value in data.items():
             if type(value) == list:
-                children.append(DictListElement(name, value))
+                children.append(ListElement(name, value))
             elif type(value) == str:
-                children.append(DictTextElement(name, value))
+                children.append(TextElement(name, value))
             elif type(value) == dict:
-                children.append(DictDictElement(name, value))
+                children.append(DictElement(name, value))
             elif type(value) == int:
-                children.append(DictIntegerElement(name, value))
+                children.append(IntegerElement(name, value))
             elif type(value) == bool:
-                children.append(DictBooleanElement(name, value))
+                children.append(BooleanElement(name, value))
             elif type(value) == float:
-                children.append(DictFloatElement(name, value))
+                children.append(FloatElement(name, value))
             elif type(value) == type(None):
                 continue
             else:
@@ -586,35 +590,32 @@ class DictConverter(Converter):
 
     # TODO use Dict as typecheck?
     def typecheck(self, element: StructureElement):
-        return isinstance(element, Dict)
+        return isinstance(element, DictElement)
 
     def match(self, element: StructureElement):
         """
-        Allways matches if the element has the right type.
+        Matches if the element has the right type and the name/value match
+        definitions (see match_name_and_value) apply.
         """
-        if not isinstance(element, Dict):
+        if not isinstance(element, DictElement):
             raise RuntimeError("Element must be a DictElement.")
-        return {}
+        return match_name_and_value(self.definition, element.name, element.value)
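# A sketch (not part of this patch) of the dict -> StructureElement dispatch in
# create_children above, for a hypothetical JSON-like dict. Note that the exact
# type() comparisons keep bool values out of the int branch, and None values
# are skipped entirely.
data = {"name": "run1", "temp": 1.5, "count": 3, "ok": True,
        "tags": ["a"], "meta": {"x": 1}, "comment": None}
# yields, in insertion order:
#   TextElement("name", ...), FloatElement("temp", ...), IntegerElement("count", ...),
#   BooleanElement("ok", ...), ListElement("tags", ...), DictElement("meta", ...)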
 
 
-# TODO: difference to SimpleFileConverter? Do we need both?
-class FileConverter(Converter):
-    def typecheck(self, element: StructureElement):
-        return isinstance(element, File)
+class DictConverter(DictElementConverter):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning(
+            "This class is deprecated. Please use DictElementConverter."))
+        super().__init__(*args, **kwargs)
 
-    def match(self, element: StructureElement):
-        if not self.typecheck(element):
-            raise RuntimeError("Element must be a file")
-        m = re.match(self.definition["match"], element.name)
-        if m is None:
-            return None
-        return m.groupdict()
 
-    def create_children(self, generalStore: GeneralStore, element: StructureElement):
-        return []
+class DictDictElementConverter(DictElementConverter):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning(
+            "This class is deprecated. Please use DictElementConverter."))
+        super().__init__(*args, **kwargs)
 
 
-class JSONFileConverter(DictConverter):
+class JSONFileConverter(DictElementConverter):
     def typecheck(self, element: StructureElement):
         return isinstance(element, File)
 
@@ -629,7 +630,7 @@ class JSONFileConverter(DictConverter):
     def create_children(self, generalStore: GeneralStore, element: StructureElement):
         if not self.typecheck(element):
             raise RuntimeError("A JSON file is needed to create children")
-        # TODO: either add explicit time check for File structure element here,
+        # TODO: either add explicit type check for File structure element here,
         #       or add a comment to suppress mypy type warning.
         with open(element.path, 'r') as json_file:
             json_data = json.load(json_file)
@@ -656,13 +657,75 @@ class JSONFileConverter(DictConverter):
         return self._create_children_from_dict(json_data)
 
 
-class _AbstractDictElementConverter(Converter):
+def match_name_and_value(definition, name, value):
+    """
+    takes match definitions from the definition argument and applies regular expressions to the
+    name and possibly the value
+
+    The keys 'match_name' and 'match' are mutually exclusive; both are optional, as is
+    'match_value'.
+
+    Returns None if any of the given patterns does not match. Otherwise, returns a dictionary
+        with the matched groups, possibly including matches from using match_value
+    """
+    if "match_name" in definition:
+        if "match" in definition:
+            raise RuntimeError(f"Do not supply both, 'match_name' and 'match'.")
+
+        m1 = re.match(definition["match_name"], name)
+        if m1 is None:
+            return None
+        else:
+            m1 = m1.groupdict()
+    elif "match" in definition:
+        m1 = re.match(definition["match"], name)
+        if m1 is None:
+            return None
+        else:
+            m1 = m1.groupdict()
+    else:
+        m1 = {}
+
+    if "match_value" in definition:
+        m2 = re.match(definition["match_value"], str(value), re.DOTALL)
+        if m2 is None:
+            return None
+        else:
+            m2 = m2.groupdict()
+    else:
+        m2 = {}
+
+    values = dict()
+    values.update(m1)
+    values.update(m2)
+    return values
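# A minimal usage sketch for match_name_and_value with hypothetical patterns:
# named groups from the name pattern and the value pattern are merged into one
# dictionary, and any failing pattern makes the whole match return None.
definition = {"match_name": r"(?P<key>temperature)",
              "match_value": r"(?P<deg>[0-9]+\.[0-9]+)"}
match_name_and_value(definition, "temperature", 25.3)
# -> {"key": "temperature", "deg": "25.3"}
match_name_and_value(definition, "pressure", 25.3)
# -> None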
+
+
+class _AbstractScalarValueElementConverter(Converter):
+    """
+    A base class for all converters that have a scalar value that can be matched using a regular
+    expression.
+
+    values must have one of the following types: str, bool, int, float
+    """
+
+    default_matches = {
+        "accept_text": False,
+        "accept_bool": False,
+        "accept_int": False,
+        "accept_float": False,
+    }
 
     def create_children(self, generalStore: GeneralStore, element: StructureElement):
         return []
 
     def typecheck(self, element: StructureElement):
-        return True
+        """
+        returns whether the type of StructureElement is accepted by this converter instance.
+        """
+        allowed_matches = self._merge_match_definition_with_default(self.default_matches,
+                                                                    self.definition)
+        return self._typecheck(element, allowed_matches)
 
     def match(self, element: StructureElement):
         """
@@ -676,42 +739,122 @@ class _AbstractDictElementConverter(Converter):
         if not self.typecheck(element):
             raise RuntimeError(
                 f"Element has an invalid type: {type(element)}.")
-        m1 = re.match(self.definition["match_name"], element.name)
-        if m1 is None:
-            return None
-        m2 = re.match(self.definition["match_value"], str(element.value))
-        if m2 is None:
-            return None
-        values = dict()
-        values.update(m1.groupdict())
-        values.update(m2.groupdict())
-        return values
+        return match_name_and_value(self.definition, element.name, element.value)
 
+    def _typecheck(self, element: StructureElement, allowed_matches: dict):
+        """
+        returns whether the type of StructureElement is accepted.
 
-class DictBooleanElementConverter(_AbstractDictElementConverter):
-    def typecheck(self, element: StructureElement):
-        return isinstance(element, DictBooleanElement)
+        Parameters:
+        element: StructureElement, the element that is checked
+        allowed_matches: Dict, a dictionary that defines what types are allowed. It must have the
+                         keys 'accept_text', 'accept_bool', 'accept_int', and 'accept_float'.
 
+        returns: whether or not the converter allows the type of element
+        """
+        return ((bool(allowed_matches["accept_text"]) and isinstance(element, TextElement))
+                or (bool(allowed_matches["accept_bool"]) and isinstance(element, BooleanElement))
+                or (bool(allowed_matches["accept_int"]) and isinstance(element, IntegerElement))
+                or (bool(allowed_matches["accept_float"]) and isinstance(element, FloatElement)))
 
-class DictFloatElementConverter(_AbstractDictElementConverter):
-    def typecheck(self, element: StructureElement):
-        return isinstance(element, DictFloatElement)
+    def _merge_match_definition_with_default(self, default: dict, definition: dict):
+        """
+        returns a dict with the same keys as the default dict, but with values overridden by
+        definition wherever it defines the same keys
+        """
 
+        result = {}
+        for key in default:
+            if key in definition:
+                result[key] = definition[key]
+            else:
+                result[key] = default[key]
+        return result
 
-class DictTextElementConverter(_AbstractDictElementConverter):
-    def typecheck(self, element: StructureElement):
-        return isinstance(element, DictTextElement)
 
+class BooleanElementConverter(_AbstractScalarValueElementConverter):
+    default_matches = {
+        "accept_text": False,
+        "accept_bool": True,
+        "accept_int": True,
+        "accept_float": False,
+    }
+
+
+class DictBooleanElementConverter(BooleanElementConverter):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning(
+            "This class is deprecated. Please use BooleanElementConverter."))
+        super().__init__(*args, **kwargs)
+
+
+class FloatElementConverter(_AbstractScalarValueElementConverter):
+    default_matches = {
+        "accept_text": False,
+        "accept_bool": False,
+        "accept_int": True,
+        "accept_float": True,
+    }
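# A sketch of the accept_* mechanism (hypothetical definition values): by
# default a FloatElementConverter also typechecks IntegerElements, and each
# accept_* key can be overridden in the converter definition.
strict = FloatElementConverter({"match_name": r".*", "accept_int": False},
                               "floats_only", converter_registry={})
strict.typecheck(FloatElement("x", 1.0))    # -> True
strict.typecheck(IntegerElement("x", 1))    # -> False, the override wins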
+
+
+class DictFloatElementConverter(FloatElementConverter):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning(
+            "This class is deprecated. Please use FloatElementConverter."))
+        super().__init__(*args, **kwargs)
+
+
+class TextElementConverter(_AbstractScalarValueElementConverter):
+    default_matches = {
+        "accept_text": True,
+        "accept_bool": True,
+        "accept_int": True,
+        "accept_float": True,
+    }
+
+    def __init__(self, definition, *args, **kwargs):
+        if "match" in definition:
+            raise ValueError("""
+The 'match' key will in future be used to match a potential name of a TextElement. Please use
+the 'match_value' key to match the value of the TextElement and 'match_name' for matching the name.
+""")
+
+        super().__init__(definition, *args, **kwargs)
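# A migration sketch for existing cfood definitions (hypothetical rule): a
# TextElement rule that previously used 'match' for the value must now use
# 'match_value'; 'match_name' matches the element's name.
old = {"match": r"(?P<content>.*)"}    # now raises the ValueError above
new = {"match_name": r"description", "match_value": r"(?P<content>.*)"}
conv = TextElementConverter(new, "description", converter_registry={})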
+
+
+class DictTextElementConverter(TextElementConverter):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning(
+            "This class is deprecated. Please use TextElementConverter."))
+        super().__init__(*args, **kwargs)
+
+
+class IntegerElementConverter(_AbstractScalarValueElementConverter):
+    default_matches = {
+        "accept_text": False,
+        "accept_bool": False,
+        "accept_int": True,
+        "accept_float": False,
+    }
 
-class DictIntegerElementConverter(_AbstractDictElementConverter):
-    def typecheck(self, element: StructureElement):
-        return isinstance(element, DictIntegerElement)
 
+class DictIntegerElementConverter(IntegerElementConverter):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning(
+            "This class is deprecated. Please use IntegerElementConverter."))
+        super().__init__(*args, **kwargs)
 
-class DictListElementConverter(Converter):
+
+class ListElementConverter(Converter):
     def create_children(self, generalStore: GeneralStore,
                         element: StructureElement):
-        if not isinstance(element, DictListElement):
+        if not isinstance(element, ListElement):
             raise RuntimeError(
-                "This converter can only process DictListElements.")
+                "This converter can only process ListElements.")
         children = []
@@ -720,64 +863,34 @@ class DictListElementConverter(Converter):
             if isinstance(list_element, str):
                 children.append(TextElement(str(index), list_element))
             elif isinstance(list_element, dict):
-                children.append(Dict(str(index), list_element))
+                children.append(DictElement(str(index), list_element))
+            elif isinstance(list_element, StructureElement):
+                children.append(list_element)
             else:
                 raise NotImplementedError(
                     f"Unkown type {type(list_element)} in list element {list_element}.")
         return children
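# A sketch of the children created for a ListElement (hypothetical content),
# assuming GeneralStore() can be constructed without arguments as in crawl.py:
# strings become TextElements, dicts become DictElements, and entries that
# already are StructureElements are passed through unchanged.
conv = ListElementConverter({"match_name": r"samples"}, "samples",
                            converter_registry={})
element = ListElement("samples", ["a", {"x": 1}, BooleanElement("b", True)])
children = conv.create_children(GeneralStore(), element)
# -> [TextElement("0", "a"), DictElement("1", {"x": 1}), BooleanElement("b", True)]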
 
     def typecheck(self, element: StructureElement):
-        return isinstance(element, DictListElement)
+        return isinstance(element, ListElement)
 
     def match(self, element: StructureElement):
-        if not isinstance(element, DictListElement):
-            raise RuntimeError("Element must be a DictListElement.")
+        if not isinstance(element, ListElement):
+            raise RuntimeError("Element must be a ListElement.")
         m = re.match(self.definition["match_name"], element.name)
         if m is None:
             return None
         if "match" in self.definition:
             raise NotImplementedError(
-                "Match is not implemented for DictListElement.")
+                "Match is not implemented for ListElement.")
         return m.groupdict()
 
 
-class DictDictElementConverter(DictConverter):
-    def create_children(self, generalStore: GeneralStore, element: StructureElement):
-        if not self.typecheck(element):
-            raise RuntimeError("A dict is needed to create children")
-
-        return self._create_children_from_dict(element.value)
-
-    def typecheck(self, element: StructureElement):
-        return isinstance(element, DictDictElement)
-
-    def match(self, element: StructureElement):
-        if not self.typecheck(element):
-            raise RuntimeError("Element must be a DictDictElement.")
-        m = re.match(self.definition["match_name"], element.name)
-        if m is None:
-            return None
-        if "match" in self.definition:
-            raise NotImplementedError(
-                "Match is not implemented for DictDictElement.")
-        return m.groupdict()
-
-
-class TextElementConverter(Converter):
-    def create_children(self, generalStore: GeneralStore,
-                        element: StructureElement):
-        return []
-
-    def typecheck(self, element: StructureElement):
-        return isinstance(element, TextElement)
-
-    def match(self, element: StructureElement):
-        if not isinstance(element, TextElement):
-            raise RuntimeError("Element must be a TextElement.")
-        m = re.match(self.definition["match"], element.value)
-        if m is None:
-            return None
-        return m.groupdict()
+class DictListElementConverter(ListElementConverter):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning(
+            "This class is deprecated. Please use ListElementConverter."))
+        super().__init__(*args, **kwargs)
 
 
 class TableConverter(Converter):
@@ -785,10 +898,10 @@ class TableConverter(Converter):
     This converter reads tables in different formats line by line and
     allows matching the corresponding rows.
 
-    The subtree generated by the table converter consists of DictDictElements, each being
+    The subtree generated by the table converter consists of DictElements, each being
     a row. The corresponding header elements will become the dictionary keys.
 
-    The rows can be matched using a DictDictElementConverter.
+    The rows can be matched using a DictElementConverter.
     """
     @abstractmethod
     def get_options(self):
@@ -804,7 +917,8 @@ class TableConverter(Converter):
             if opt_name in self.definition:
                 el = self.definition[opt_name]
                 # The option can often either be a single value or a list of values.
-                # In the latter case each element of the list will be converted to the defined type.
+                # In the latter case each element of the list will be converted to the defined
+                # type.
                 if isinstance(el, list):
                     option_dict[opt_name] = [
                         opt_conversion(el_el) for el_el in el]
@@ -848,7 +962,7 @@ class XLSXTableConverter(TableConverter):
         child_elements = list()
         for index, row in table.iterrows():
             child_elements.append(
-                DictDictElement(str(index), row.to_dict()))
+                DictElement(str(index), row.to_dict()))
         return child_elements
 
 
@@ -876,5 +990,5 @@ class CSVTableConverter(TableConverter):
         child_elements = list()
         for index, row in table.iterrows():
             child_elements.append(
-                DictDictElement(str(index), row.to_dict()))
+                DictElement(str(index), row.to_dict()))
         return child_elements
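# A sketch of the subtree both table converters produce, assuming a pandas
# DataFrame as returned by the respective reader: one DictElement per row,
# named after the row index, with the header fields as dictionary keys.
import pandas as pd

table = pd.DataFrame([{"sample": "A", "temp": 1.2},
                      {"sample": "B", "temp": 3.4}])
rows = [DictElement(str(index), row.to_dict()) for index, row in table.iterrows()]
# rows[0].value == {"sample": "A", "temp": 1.2} -> matched by a DictElementConverter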
diff --git a/src/caoscrawler/crawl.py b/src/caoscrawler/crawl.py
index 77b737defedbf24371026e6ff0957fab03f1ed4a..7fed3cb7362ebc077bedfd401bcf5569a9f0bd0c 100644
--- a/src/caoscrawler/crawl.py
+++ b/src/caoscrawler/crawl.py
@@ -28,35 +28,42 @@ Crawl a file structure using a yaml cfood definition and synchronize
-the acuired data with CaosDB.
+the acquired data with CaosDB.
 """
 
+from __future__ import annotations
+
+import argparse
 import importlib
-from caosadvancedtools.cache import UpdateCache, Cache
-import uuid
-import sys
+import logging
 import os
+import sys
+import uuid
+import warnings
 import yaml
+
+from argparse import RawTextHelpFormatter
+from collections import defaultdict
+from copy import deepcopy
 from enum import Enum
-import logging
 from importlib_resources import files
-import argparse
-from argparse import RawTextHelpFormatter
+from jsonschema import validate
+from typing import Any, Optional, Type, Union
+
 import caosdb as db
+
+from caosadvancedtools.cache import UpdateCache, Cache
 from caosadvancedtools.crawler import Crawler as OldCrawler
+from caosdb.apiutils import (compare_entities, EntityMergeConflictError,
+                             merge_entities)
 from caosdb.common.datatype import is_reference
-from .stores import GeneralStore, RecordStore
-from .identified_cache import IdentifiedCache
-from .structure_elements import StructureElement, Directory
+
 from .converters import Converter, DirectoryConverter
+from .identifiable import Identifiable
 from .identifiable_adapters import (IdentifiableAdapter,
                                     LocalStorageIdentifiableAdapter,
                                     CaosDBIdentifiableAdapter)
-from collections import defaultdict
-from typing import Any, Dict, List, Optional, Type, Union
-from caosdb.apiutils import compare_entities, merge_entities
-from copy import deepcopy
-from jsonschema import validate
-
-from caosdb.high_level_api import convert_to_python_object
+from .identified_cache import IdentifiedCache
 from .macros import defmacro_constructor, macro_constructor
+from .stores import GeneralStore, RecordStore
+from .structure_elements import StructureElement, Directory
 
 logger = logging.getLogger(__name__)
 
@@ -114,7 +121,7 @@ def check_identical(record1: db.Entity, record2: db.Entity, ignore_id=False):
             return False
         for attribute in ("datatype", "importance", "unit"):
             # only make an update for those attributes if there is a value difference and
-            # the value in the target_data is not None
+            # the value in the crawled_data is not None
             if attribute in comp[0]["properties"][key]:
                 attr_val = comp[0]["properties"][key][attribute]
                 other_attr_val = (comp[1]["properties"][key][attribute]
@@ -169,7 +176,7 @@ class Crawler(object):
                  generalStore: Optional[GeneralStore] = None,
                  debug: bool = False,
                  identifiableAdapter: IdentifiableAdapter = None,
-                 securityMode: int = SecurityMode.UPDATE
+                 securityMode: SecurityMode = SecurityMode.UPDATE
                  ):
         """
         Create a new crawler and initialize an empty RecordStore and GeneralStore.
@@ -194,9 +201,11 @@ class Crawler(object):
              Please use SecurityMode Enum
         """
 
-        # TODO: check if this feature is really needed
-
-        self.identified_cache = IdentifiedCache()
+        # The following caches store records for which we have checked whether they exist on the
+        # remote server. Since it is important to know whether they exist or not, we store them
+        # in two different caches.
+        self.remote_existing_cache = IdentifiedCache()
+        self.remote_missing_cache = IdentifiedCache()
         self.recordStore = RecordStore()
         self.securityMode = securityMode
 
@@ -204,18 +213,18 @@ class Crawler(object):
         if generalStore is None:
             self.generalStore = GeneralStore()
 
-        self.identifiableAdapter = identifiableAdapter
-        if identifiableAdapter is None:
-            self.identifiableAdapter = LocalStorageIdentifiableAdapter()
+        self.identifiableAdapter: IdentifiableAdapter = LocalStorageIdentifiableAdapter()
+        if identifiableAdapter is not None:
+            self.identifiableAdapter = identifiableAdapter
         # If a directory is crawled this may hold the path to that directory
-        self.crawled_directory = None
+        self.crawled_directory: Optional[str] = None
         self.debug = debug
         if self.debug:
             # order in the tuple:
             # 0: generalStore
             # 1: recordStore
-            self.debug_tree: Dict[str, tuple] = dict()
-            self.debug_metadata: Dict[str, dict] = dict()
+            self.debug_tree: dict[str, tuple] = dict()
+            self.debug_metadata: dict[str, dict] = dict()
             self.debug_metadata["copied"] = dict()
             self.debug_metadata["provenance"] = defaultdict(lambda: dict())
             self.debug_metadata["usage"] = defaultdict(lambda: set())
@@ -235,7 +244,7 @@ class Crawler(object):
 
         return self._resolve_validator_paths(crawler_definition, crawler_definition_path)
 
-    def _load_definition_from_yaml_dict(self, crawler_definitions: List[Dict]):
+    def _load_definition_from_yaml_dict(self, crawler_definitions: list[dict]):
         """Load crawler definitions from a list of (yaml) dicts `crawler_definitions` which
         contains either one or two documents.
 
@@ -257,7 +266,7 @@ class Crawler(object):
         #       tested in the next lines of code:
 
         # Load the cfood schema:
-        with open(files('caoscrawler').joinpath('cfood-schema.yml'), "r") as f:
+        with open(str(files('caoscrawler').joinpath('cfood-schema.yml')), "r") as f:
             schema = yaml.safe_load(f)
 
         # Add custom converters to converter enum in schema:
@@ -314,7 +323,7 @@ class Crawler(object):
         """
 
         # Defaults for the converter registry:
-        converter_registry: Dict[str, Dict[str, str]] = {
+        converter_registry: dict[str, dict[str, str]] = {
             "Directory": {
                 "converter": "DirectoryConverter",
                 "package": "caoscrawler.converters"},
@@ -328,7 +337,7 @@ class Crawler(object):
                 "converter": "MarkdownFileConverter",
                 "package": "caoscrawler.converters"},
             "File": {
-                "converter": "FileConverter",
+                "converter": "SimpleFileConverter",
                 "package": "caoscrawler.converters"},
             "JSONFile": {
                 "converter": "JSONFileConverter",
@@ -339,35 +348,56 @@ class Crawler(object):
             "XLSXTableConverter": {
                 "converter": "XLSXTableConverter",
                 "package": "caoscrawler.converters"},
-            "Dict": {
-                "converter": "DictConverter",
-                "package": "caoscrawler.converters"},
             "DictBooleanElement": {
-                "converter": "DictBooleanElementConverter",
+                "converter": "BooleanElementConverter",
+                "package": "caoscrawler.converters"},
+            "BooleanElement": {
+                "converter": "BooleanElementConverter",
                 "package": "caoscrawler.converters"},
             "DictFloatElement": {
-                "converter": "DictFloatElementConverter",
+                "converter": "FloatElementConverter",
+                "package": "caoscrawler.converters"},
+            "FloatElement": {
+                "converter": "FloatElementConverter",
                 "package": "caoscrawler.converters"},
             "DictTextElement": {
-                "converter": "DictTextElementConverter",
+                "converter": "TextElementConverter",
+                "package": "caoscrawler.converters"},
+            "TextElement": {
+                "converter": "TextElementConverter",
                 "package": "caoscrawler.converters"},
             "DictIntegerElement": {
-                "converter": "DictIntegerElementConverter",
+                "converter": "IntegerElementConverter",
+                "package": "caoscrawler.converters"},
+            "IntegerElement": {
+                "converter": "IntegerElementConverter",
                 "package": "caoscrawler.converters"},
             "DictListElement": {
-                "converter": "DictListElementConverter",
+                "converter": "ListElementConverter",
+                "package": "caoscrawler.converters"},
+            "ListElement": {
+                "converter": "ListElementConverter",
                 "package": "caoscrawler.converters"},
             "DictDictElement": {
-                "converter": "DictDictElementConverter",
+                "converter": "DictElementConverter",
+                "package": "caoscrawler.converters"},
+            "DictElement": {
+                "converter": "DictElementConverter",
+                "package": "caoscrawler.converters"},
+            "Dict": {
+                "converter": "DictElementConverter",
                 "package": "caoscrawler.converters"},
-            "TextElement": {
-                "converter": "TextElementConverter",
-                "package": "caoscrawler.converters"}
         }
 
         # More converters from definition file:
         if "Converters" in definition:
             for key, entry in definition["Converters"].items():
+                if key in ["Dict", "DictTextElement", "DictIntegerElement", "DictBooleanElement",
+                           "DictDictElement", "DictListElement", "DictFloatElement"]:
+                    warnings.warn(DeprecationWarning(f"{key} is deprecated. Please use the new"
+                                                     " variant without the 'Dict' prefix, or "
+                                                     "'DictElement' in the case of 'Dict'"))
+
                 converter_registry[key] = {
                     "converter": entry["converter"],
                     "package": entry["package"]
@@ -432,7 +462,7 @@ class Crawler(object):
 
         return converters
 
-    def start_crawling(self, items: Union[List[StructureElement], StructureElement],
+    def start_crawling(self, items: Union[list[StructureElement], StructureElement],
                        crawler_definition: dict,
                        converter_registry: dict):
         """
@@ -449,7 +479,7 @@ class Crawler(object):
 
         Returns
         -------
-        target_data : list
+        crawled_data : list
             the final list with the target state of Records.
         """
 
@@ -464,15 +494,16 @@ class Crawler(object):
         self.run_id = uuid.uuid1()
         local_converters = Crawler.initialize_converters(
             crawler_definition, converter_registry)
+
         # This recursive crawling procedure generates the update list:
-        self.target_data: List[db.Record] = []
+        self.crawled_data: list[db.Record] = []
         self._crawl(items, local_converters, self.generalStore,
                     self.recordStore, [], [])
 
         if self.debug:
             self.debug_converters = local_converters
 
-        return self.target_data
+        return self.crawled_data
 
     def synchronize(self, commit_changes: bool = True, unique_names=True):
         """
@@ -482,34 +513,43 @@ class Crawler(object):
         # After the crawling, the actual synchronization with the database, based on the
         # update list is carried out:
 
-        return self._synchronize(self.target_data, commit_changes, unique_names=unique_names)
+        return self._synchronize(self.crawled_data, commit_changes, unique_names=unique_names)
 
-    def can_be_checked_externally(self, record: db.Record):
+    def _has_reference_value_without_id(self, ident: Identifiable) -> bool:
         """
-        Returns False if there is at least one property in record which:
+        Returns True if there is at least one value in the properties attribute of ``ident`` which:
+
         a) is a reference property AND
-        b) where the value is set to a db.Entity (instead of an ID) AND
-        c) where the ID of the value is not set (to an integer)
+        b) where the value is set to a
+           :external+caosdb-pylib:py:class:`db.Entity <caosdb.common.models.Entity>`
+           (instead of an ID) AND
+        c) where the ID of the value (the
+           :external+caosdb-pylib:py:class:`db.Entity <caosdb.common.models.Entity>` object in b))
+           is not set (to an integer)
+
+        Returns
+        -------
+        bool
+            True if there is a value without id (see above)
 
-        Returns True otherwise.
+        Raises
+        ------
+        ValueError
+            If no Identifiable is given.
         """
-        for p in record.properties:
-            if isinstance(p.value, list):
-                for el in p.value:
+        if ident is None:
+            raise ValueError("Identifiable has to be given as argument")
+        for pvalue in list(ident.properties.values()) + ident.backrefs:
+            if isinstance(pvalue, list):
+                for el in pvalue:
                     if isinstance(el, db.Entity) and el.id is None:
-                        return False
-            # TODO: please check!
-            #       I removed the condition "is_reference", because the datatype field
-            #       that is checked within this function is not always present for references
-            #       parsed from the file structure. We have to rely on the condition, that
-            #       if a property value is of type entity, it can be assumed to be a reference.
-            # elif (is_reference(p) and isinstance(p.value, db.Entity)
-            #         and p.value.id is None):
-            elif isinstance(p.value, db.Entity) and p.value.id is None:
-                return False
-        return True
+                        return True
+            elif isinstance(pvalue, db.Entity) and pvalue.id is None:
+                return True
+        return False
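# A sketch of conditions a)-c) with a hypothetical Identifiable: a reference
# value that is a db.Entity without an id cannot be checked server-side yet.
import caosdb as db

referenced = db.Record(name="sample1")   # referenced.id is None
# an Identifiable whose properties contain `referenced` -> True
referenced.id = 42
# the same Identifiable afterwards -> False, the reference is now resolvable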
 
-    def create_flat_list(self, ent_list: List[db.Entity], flat: List[db.Entity]):
+    @staticmethod
+    def create_flat_list(ent_list: list[db.Entity], flat: list[db.Entity]):
         """
         Recursively adds all properties contained in entities from ent_list to
         the output list flat. Each element will only be added once to the list.
@@ -525,34 +565,35 @@ class Crawler(object):
                         if isinstance(el, db.Entity):
                             if el not in flat:
                                 flat.append(el)
-                            # TODO: move inside if block?
-                            self.create_flat_list([el], flat)
+                                Crawler.create_flat_list([el], flat)
                 elif isinstance(p.value, db.Entity):
                     if p.value not in flat:
                         flat.append(p.value)
-                    # TODO: move inside if block?
-                    self.create_flat_list([p.value], flat)
+                        Crawler.create_flat_list([p.value], flat)
 
-    def all_references_are_existing_already(self, record: db.Record):
+    def _has_missing_object_in_references(self, ident: Identifiable, referencing_entities: list):
         """
-        returns true if all references either have IDs or were checked remotely and not found (i.e.
-        they exist in the local cache)
+        returns True if any value in the properties attribute is a db.Entity object that
+        is contained in the `remote_missing_cache`. If ident has such an object in
+        properties, it means that it references another Entity for which we checked
+        whether it exists remotely and it was not found.
         """
-        for p in record.properties:
-            # if (is_reference(p)
+        if ident is None:
+            raise ValueError("Identifiable has to be given as argument")
+        for pvalue in list(ident.properties.values()) + ident.backrefs:
             # Entity instead of ID and not cached locally
-            if (isinstance(p.value, list)):
-                for el in p.value:
-                    if (isinstance(el, db.Entity) and el.id is None
-                            and self.get_identified_record_from_local_cache(el) is None):
-                        return False
-            if (isinstance(p.value, db.Entity) and p.value.id is None
-                    and self.get_identified_record_from_local_cache(p.value) is None):
+            if (isinstance(pvalue, list)):
+                for el in pvalue:
+                    if (isinstance(el, db.Entity) and self.get_from_remote_missing_cache(
+                            self.identifiableAdapter.get_identifiable(el, referencing_entities)) is not None):
+                        return True
+            if (isinstance(pvalue, db.Entity) and self.get_from_remote_missing_cache(
+                    self.identifiableAdapter.get_identifiable(pvalue, referencing_entities)) is not None):
                 # might be checked when reference is resolved
-                return False
-        return True
+                return True
+        return False
 
-    def replace_references_with_cached(self, record: db.Record):
+    def replace_references_with_cached(self, record: db.Record, referencing_entities: list):
         """
         Replace all references with the versions stored in the cache.
 
@@ -563,8 +604,8 @@ class Crawler(object):
                 lst = []
                 for el in p.value:
                     if (isinstance(el, db.Entity) and el.id is None):
-                        cached = self.get_identified_record_from_local_cache(
-                            el)
+                        cached = self.get_from_any_cache(
+                            self.identifiableAdapter.get_identifiable(el, referencing_entities))
                         if cached is None:
                             raise RuntimeError("Not in cache.")
                         if not check_identical(cached, el, True):
@@ -578,7 +619,8 @@ class Crawler(object):
                         lst.append(el)
                 p.value = lst
             if (isinstance(p.value, db.Entity) and p.value.id is None):
-                cached = self.get_identified_record_from_local_cache(p.value)
+                cached = self.get_from_any_cache(
+                    self.identifiableAdapter.get_identifiable(p.value, referencing_entities))
                 if cached is None:
                     raise RuntimeError("Not in cache.")
                 if not check_identical(cached, p.value, True):
@@ -589,67 +631,117 @@ class Crawler(object):
                         raise RuntimeError("Not identical.")
                 p.value = cached
 
-    def get_identified_record_from_local_cache(self, record: db.Record):
+    def get_from_remote_missing_cache(self, identifiable: Identifiable):
         """
-        returns the identifiable if an identifiable with the same values already exists locally
+        returns the identified record if an identifiable with the same values already exists locally
-        (Each identifiable that is not found on the remote server, is 'cached' locally to prevent
-        that the same identifiable exists twice)
+        (Each identifiable that is not found on the remote server is 'cached' locally to prevent
+        the same identifiable from existing twice)
         """
-        if self.identifiableAdapter is None:
-            raise RuntimeError("Should not happen.")
-        identifiable = self.identifiableAdapter.get_identifiable(record)
         if identifiable is None:
-            # TODO: check whether the same idea as below works here
-            identifiable = record
-            # return None
+            raise ValueError("Identifiable has to be given as argument")
 
-        if identifiable in self.identified_cache:
-            return self.identified_cache[identifiable]
+        if identifiable in self.remote_missing_cache:
+            return self.remote_missing_cache[identifiable]
         else:
             return None
 
-    def add_identified_record_to_local_cache(self, record: db.Record):
+    def get_from_any_cache(self, identifiable: Identifiable):
         """
-        adds the given identifiable to the local cache
-
-        No identifiable with the same values must exist locally.
+        returns the identified record if an identifiable with the same values already exists locally
-        (Each identifiable that is not found on the remote server, is 'cached' locally to prevent
-        that the same identifiable exists twice)
+        (Each identifiable that is not found on the remote server is 'cached' locally to prevent
+        the same identifiable from existing twice)
-
-        Return False if there is no identifiable for this record and True otherwise.
         """
-        if self.identifiableAdapter is None:
-            raise RuntimeError("Should not happen.")
-        identifiable = self.identifiableAdapter.get_identifiable(record)
         if identifiable is None:
-            # TODO: this error report is bad
-            #       we need appropriate handling for records without an identifiable
-            #       or at least a simple fallback definition if tehre is no identifiable.
+            raise ValueError("Identifiable has to be given as argument")
+
+        if identifiable in self.remote_existing_cache:
+            return self.remote_existing_cache[identifiable]
+        elif identifiable in self.remote_missing_cache:
+            return self.remote_missing_cache[identifiable]
+        else:
+            return None
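# A sketch of the two-cache discipline (hypothetical record and identifiable):
# after a remote check a record lands in exactly one of the caches, and
# get_from_any_cache answers whether the identifiable was seen at all.
import caosdb as db

crawler = Crawler()
record = db.Record(name="sample1")
identifiable = ...  # as built by crawler.identifiableAdapter.get_identifiable
crawler.add_to_remote_missing_cache(record, identifiable)
assert crawler.get_from_remote_missing_cache(identifiable) is record
assert crawler.get_from_any_cache(identifiable) is record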
+
+    def add_to_remote_missing_cache(self, record: db.Record, identifiable: Identifiable):
+        """
+        stores the given Record in the remote_missing_cache.
 
-            # print(record)
-            # raise RuntimeError("No identifiable for record.")
+        If identifiable is None, the Record is NOT stored.
+        """
+        self.add_to_cache(record=record, cache=self.remote_missing_cache,
+                          identifiable=identifiable)
 
-            # TODO: check whether that holds:
-            #       if there is no identifiable, for the cache that is the same
-            #       as if the complete entity is the identifiable:
-            identifiable = record
-        self.identified_cache.add(identifiable=identifiable, record=record)
+    def add_to_remote_existing_cache(self, record: db.Record, identifiable: Identifiable):
+        """
+        stores the given Record in the remote_existing_cache.
 
-    def copy_attributes(self, fro: db.Entity, to: db.Entity):
+        If identifiable is None, the Record is NOT stored.
         """
-        Copy all attributes from one entity to another entity.
+        self.add_to_cache(record=record, cache=self.remote_existing_cache,
+                          identifiable=identifiable)
+
+    def add_to_cache(self, record: db.Record, cache: IdentifiedCache,
+                     identifiable: Identifiable) -> None:
         """
+        stores the given Record in the given cache.
 
-        merge_entities(to, fro)
+        If identifiable is None, the Record is NOT stored.
+        """
+        if identifiable is not None:
+            cache.add(identifiable=identifiable, record=record)
 
-    def split_into_inserts_and_updates(self, ent_list: List[db.Entity]):
-        if self.identifiableAdapter is None:
-            raise RuntimeError("Should not happen.")
-        to_be_inserted: List[db.Entity] = []
-        to_be_updated: List[db.Entity] = []
+    @staticmethod
+    def bend_references_to_new_object(old, new, entities):
+        """ Bend references to the other object
+        Iterate over all entities in `entities`, check the values of all their properties and
+        replace each occurrence of the old Entity with the new Entity
+        """
+        for el in entities:
+            for p in el.properties:
+                if isinstance(p.value, list):
+                    for index, val in enumerate(p.value):
+                        if val is old:
+                            p.value[index] = new
+                else:
+                    if p.value is old:
+                        p.value = new
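# A self-contained sketch of the reference bending above (hypothetical
# records): every property value pointing at the duplicate object is
# redirected to the surviving object; comparison is by identity, not equality.
import caosdb as db

old = db.Record(name="sample1")
new = db.Record(name="sample1")
holder = db.Record(name="experiment")
holder.add_property(name="sample", value=old)
Crawler.bend_references_to_new_object(old=old, new=new, entities=[holder])
assert holder.get_property("sample").value is new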
+
+    @staticmethod
+    def create_reference_mapping(flat: list[db.Entity]):
+        """
+        Create a dictionary of dictionaries of the form:
+        dict[int, dict[str, list[db.Entity]]]
+
+        - The integer index is the Python id of the value object.
+        - The string is the name of the first parent of the referencing object.
+
+        Each value object is taken from the values of all properties of the entities in flat.
+
+        So the returned mapping maps ids of entities to the objects which are referring
+        to them.
+        """
+        # TODO we need to treat children of RecordTypes somehow.
+        references: dict[int, dict[str, list[db.Entity]]] = {}
+        for ent in flat:
+            for p in ent.properties:
+                val = p.value
+                if not isinstance(val, list):
+                    val = [val]
+                for v in val:
+                    if isinstance(v, db.Entity):
+                        if id(v) not in references:
+                            references[id(v)] = {}
+                        if ent.parents[0].name not in references[id(v)]:
+                            references[id(v)][ent.parents[0].name] = []
+                        references[id(v)][ent.parents[0].name].append(ent)
+
+        return references
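# A sketch of the resulting mapping for a hypothetical pair of records: the
# referenced object's Python id() maps to the referencing entities, grouped
# by the name of the referencing entity's first parent.
import caosdb as db

sample = db.Record(name="sample1")
experiment = db.Record(name="exp1").add_parent(name="Experiment")
experiment.add_property(name="sample", value=sample)
refs = Crawler.create_reference_mapping([experiment, sample])
# refs == {id(sample): {"Experiment": [experiment]}}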
+
+    def split_into_inserts_and_updates(self, ent_list: list[db.Entity]):
+        to_be_inserted: list[db.Entity] = []
+        to_be_updated: list[db.Entity] = []
         flat = list(ent_list)
-        # assure all entities are direct members TODO Can this be removed at some point?Check only?
+        # assure all entities are direct members TODO Can this be removed at some point? Check only?
-        self.create_flat_list(ent_list, flat)
+        Crawler.create_flat_list(ent_list, flat)
 
         # TODO: can the following be removed at some point
         for ent in flat:
@@ -660,102 +752,93 @@ class Crawler(object):
         # flat contains Entities which could not yet be checked against the remote server
         while resolved_references and len(flat) > 0:
             resolved_references = False
-
+            referencing_entities = self.create_reference_mapping(
+                flat + to_be_updated + to_be_inserted)
+
+            # For each element we try to find out whether we can find it on the server or whether
+            # it does not yet exist. Since a Record may reference other unknown Records it might
+            # not be possible to answer this right away.
+            # The following checks are done on each Record:
+            # 1. Can it be identified via an ID?
+            # 2. Can it be identified via a path?
+            # 3. Is it in the cache of already checked Records?
+            # 4. Can it be checked on the remote server?
+            # 5. Does it have to be new since a needed reference is missing?
             for i in reversed(range(len(flat))):
                 record = flat[i]
+                identifiable = self.identifiableAdapter.get_identifiable(
+                    record,
+                    referencing_entities=referencing_entities)
 
                 # TODO remove if the exception is never raised
-                if (record.id is not None or record in to_be_inserted):
+                if record in to_be_inserted:
                     raise RuntimeError("This should not be reached since treated elements"
                                        "are removed from the list")
-                # Check the local cache first for duplicate
-                elif self.get_identified_record_from_local_cache(record) is not None:
-
-                    # This record is a duplicate that can be removed. Make sure we do not lose
-                    # information
-                    # Update an (local) identified record that will be inserted
-                    newrecord = self.get_identified_record_from_local_cache(
-                        record)
-                    self.copy_attributes(fro=record, to=newrecord)
-                    # Bend references to the other object
-                    # TODO refactor this
-                    for el in flat + to_be_inserted + to_be_updated:
-                        for p in el.properties:
-                            if isinstance(p.value, list):
-                                for index, val in enumerate(p.value):
-                                    if val is record:
-                                        p.value[index] = newrecord
-                            else:
-                                if p.value is record:
-                                    p.value = newrecord
-
+                # 1. Can it be identified via an ID?
+                elif record.id is not None:
+                    to_be_updated.append(record)
+                    self.add_to_remote_existing_cache(record, identifiable)
                     del flat[i]
-
-                # all references need to be IDs that exist on the remote server
-                elif self.can_be_checked_externally(record):
-
-                    # Check remotely
-                    # TODO: remove deepcopy?
-                    identified_record = self.identifiableAdapter.retrieve_identified_record_for_record(
-                        deepcopy(record))
-                    if identified_record is None:
-                        # identifiable does not exist remotely
+                # 2. Can it be identified via a path?
+                elif record.path is not None:
+                    existing = self._get_entity_by_path(record.path)
+                    if existing is None:
                         to_be_inserted.append(record)
-                        self.add_identified_record_to_local_cache(record)
+                        self.add_to_remote_missing_cache(record, identifiable)
                         del flat[i]
                     else:
-                        # side effect
-                        record.id = identified_record.id
-                        # On update every property needs to have an ID.
-                        # This will be achieved by the function execute_updates_in_list below.
-                        # For files this is not enough, we also need to copy over
-                        # checksum and size:
-                        if isinstance(record, db.File):
-                            record._size = identified_record._size
-                            record._checksum = identified_record._checksum
-
+                        record.id = existing.id
+                        # TODO check the following copying of _size and _checksum
+                        # Copy over checksum and size too if it is a file
+                        record._size = existing._size
+                        record._checksum = existing._checksum
                         to_be_updated.append(record)
-                        # TODO think this through
-                        self.add_identified_record_to_local_cache(record)
+                        self.add_to_remote_existing_cache(record, identifiable)
                         del flat[i]
-                    resolved_references = True
-
-                # e.g. references an identifiable that does not exist remotely
-                elif self.all_references_are_existing_already(record):
-
-                    # TODO: (for review)
-                    # This was the old version, but also for this case the
-                    # check for identifiables has to be done.
-                    # to_be_inserted.append(record)
-                    # self.add_identified_record_to_local_cache(record)
-                    # del flat[i]
+                # 3. Is it in the cache of already checked Records?
+                elif self.get_from_any_cache(identifiable) is not None:
+                    # We merge the two in order to prevent loss of information
+                    newrecord = self.get_from_any_cache(identifiable)
+                    try:
+                        merge_entities(newrecord, record)
+                    except EntityMergeConflictError:
+                        continue
+                    Crawler.bend_references_to_new_object(
+                        old=record, new=newrecord, entities=flat + to_be_updated + to_be_inserted)
 
-                    # TODO: (for review)
-                    # If the following replacement is not done, the cache will
-                    # be invalid as soon as references are resolved.
-                    # replace references by versions from cache:
-                    self.replace_references_with_cached(record)
+                    del flat[i]
+                    resolved_references = True
 
-                    identified_record = self.identifiableAdapter.retrieve_identified_record_for_record(
-                        deepcopy(record))
+                # 4. Can it be checked on the remote server?
+                elif not self._has_reference_value_without_id(identifiable):
+                    identified_record = (
+                        self.identifiableAdapter.retrieve_identified_record_for_identifiable(
+                            identifiable))
                     if identified_record is None:
-                        # identifiable does not exist remotely
+                        # identifiable does not exist remotely -> record needs to be inserted
                         to_be_inserted.append(record)
-                        self.add_identified_record_to_local_cache(record)
+                        self.add_to_remote_missing_cache(record, identifiable)
                         del flat[i]
                     else:
                         # side effect
                         record.id = identified_record.id
-                        # On update every property needs to have an ID.
-                        # This will be achieved by the function execute_updates_in_list below.
-
                         to_be_updated.append(record)
-                        # TODO think this through
-                        self.add_identified_record_to_local_cache(record)
+                        self.add_to_remote_existing_cache(record, identifiable)
                         del flat[i]
+                    resolved_references = True
 
+                # 5. Does it have to be new since a needed reference is missing?
+                # (Is it impossible to check this record because an identifiable references a
+                # missing record?)
+                elif self._has_missing_object_in_references(identifiable, referencing_entities):
+                    to_be_inserted.append(record)
+                    self.add_to_remote_missing_cache(record, identifiable)
+                    del flat[i]
                     resolved_references = True
 
+            for record in flat:
+                self.replace_references_with_cached(record, referencing_entities)
+
         if len(flat) > 0:
             raise RuntimeError(
                 "Could not resolve all Entity references. Circular Dependency?")
@@ -774,28 +857,70 @@ class Crawler(object):
                             el.value[index] = val.id
 
     @staticmethod
-    def remove_unnecessary_updates(target_data: List[db.Record],
-                                   identified_records: List[db.Record]):
+    def _merge_properties_from_remote(
+            crawled_data: list[db.Record],
+            identified_records: list[db.Record]
+    ):
+        """Merge the entity representations that were created by crawling the data with the
+        remotely found identified records, such that new properties and property values are
+        updated correctly while additional remote properties are not overwritten.
+
+        Parameters
+        ----------
+        crawled_data : list[db.Record]
+            List of the Entities created by the crawler
+        identified_records : list[db.Record]
+            List of identified remote Records
+
+        Returns
+        -------
+        to_be_updated : list[db.Record]
+            List of merged records
         """
-        checks whether all relevant attributes (especially Property values) are equal
+        to_be_updated = []
+        for target, identified in zip(crawled_data, identified_records):
+            # Special treatment for name and description in case they have been
+            # set in the server independently from the crawler
+            for attr in ["name", "description"]:
+                if getattr(target, attr) is None:
+                    # The crawler didn't find any name or description, i.e., not
+                    # an empty one. In this case (and only in this), keep any
+                    # existing name or description.
+                    setattr(target, attr, getattr(identified, attr))
+
+            # Create a temporary copy since the merge will be conducted in place
+            tmp = deepcopy(identified)
+            # A force merge will overwrite any properties that both the
+            # identified and the crawled record have with the values of the
+            # crawled record while keeping existing properties intact.
+            merge_entities(tmp, target, force=True)
+            to_be_updated.append(tmp)
+
+        return to_be_updated
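# A sketch of the force-merge semantics with hypothetical records: crawled
# property values win on conflict, while attributes and properties that only
# exist on the remote record survive the merge.
import caosdb as db

remote = db.Record(id=42, name="run1")
remote.add_property(name="operator", value="alice")
remote.add_property(name="temperature", value=20)
crawled = db.Record(id=42)
crawled.add_property(name="temperature", value=25)
merged = Crawler._merge_properties_from_remote([crawled], [remote])[0]
# merged: name "run1", operator "alice" (kept), temperature 25 (updated)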
 
-        Returns (in future)
+    @staticmethod
+    def remove_unnecessary_updates(
+            crawled_data: list[db.Record],
+            identified_records: list[db.Record]
+    ):
+        """Compare the Records to be updated with their remote
+        counterparts. Only update if there are actual differences.
+
+        Returns
         -------
         update list without unnecessary updates
 
         """
-        if len(target_data) != len(identified_records):
+        if len(crawled_data) != len(identified_records):
             raise RuntimeError("The lists of updates and of identified records need to be of the "
                                "same length!")
-        # TODO this can now easily be changed to a function without side effect
-        for i in reversed(range(len(target_data))):
-            identical = check_identical(target_data[i], identified_records[i])
+        actual_updates = []
+        for i in reversed(range(len(crawled_data))):
 
-            if identical:
-                del target_data[i]
-                continue
-            else:
-                pass
+            if not check_identical(crawled_data[i], identified_records[i]):
+                actual_updates.append(crawled_data[i])
+
+        return actual_updates
 
     @staticmethod
     def execute_parent_updates_in_list(to_be_updated, securityMode, run_id, unique_names):
@@ -843,12 +968,19 @@ class Crawler(object):
     def _get_entity_by_name(name):
         return db.Entity(name=name).retrieve()
 
+    @staticmethod
+    def _get_entity_by_path(path):
+        try:
+            return db.execute_query(f"FIND FILE WHICH IS STORED AT '{path}'", unique=True)
+        except db.exceptions.EmptyUniqueQueryError:
+            return None
+
     @staticmethod
     def _get_entity_by_id(id):
         return db.Entity(id=id).retrieve()
 
     @staticmethod
-    def execute_inserts_in_list(to_be_inserted, securityMode, run_id: int = None,
+    def execute_inserts_in_list(to_be_inserted, securityMode, run_id: uuid.UUID = None,
                                 unique_names=True):
         for record in to_be_inserted:
             for prop in record.properties:
@@ -876,7 +1008,7 @@ class Crawler(object):
                     _resolve_datatype(prop, entity)
 
     @staticmethod
-    def execute_updates_in_list(to_be_updated, securityMode, run_id: int = None,
+    def execute_updates_in_list(to_be_updated, securityMode, run_id: uuid.UUID = None,
                                 unique_names=True):
         Crawler.set_ids_and_datatype_of_parents_and_properties(to_be_updated)
         logger.debug("UPDATE")
@@ -888,12 +1020,12 @@ class Crawler(object):
                 update_cache = UpdateCache()
                 update_cache.insert(to_be_updated, run_id)
 
-    def _synchronize(self, target_data: List[db.Record], commit_changes: bool = True,
+    def _synchronize(self, crawled_data: list[db.Record], commit_changes: bool = True,
                      unique_names=True):
         """
         This function applies several stages:
-        1) Retrieve identifiables for all records in target_data.
-        2) Compare target_data with existing records.
+        1) Retrieve identifiables for all records in crawled_data.
+        2) Compare crawled_data with existing records.
         3) Insert and update records based on the set of identified differences.
 
         This function makes use of an IdentifiableAdapter which is used to retrieve
@@ -905,11 +1037,8 @@ class Crawler(object):
         Return the final to_be_inserted and to_be_updated as tuple.
         """
 
-        if self.identifiableAdapter is None:
-            raise RuntimeError("Should not happen.")
-
-        to_be_inserted, to_be_updated = self.split_into_inserts_and_updates(
-            target_data)
+        to_be_inserted, to_be_updated = self.split_into_inserts_and_updates(crawled_data)
+        referencing_entities = self.create_reference_mapping(to_be_updated + to_be_inserted)
 
         # TODO: refactoring of typo
         for el in to_be_updated:
@@ -917,11 +1046,16 @@ class Crawler(object):
             self.replace_entities_with_ids(el)
 
         identified_records = [
-            self.identifiableAdapter.retrieve_identified_record_for_record(
-                record)
+            self.identifiableAdapter.retrieve_identified_record_for_record(record,
+                                                                           referencing_entities)
             for record in to_be_updated]
-        # remove unnecessary updates from list by comparing the target records to the existing ones
-        self.remove_unnecessary_updates(to_be_updated, identified_records)
+        # Merge with existing data to prevent unwanted overwrites
+        to_be_updated = self._merge_properties_from_remote(to_be_updated,
+                                                           identified_records)
+        # remove unnecessary updates from list by comparing the target records
+        # to the existing ones
+        to_be_updated = self.remove_unnecessary_updates(
+            to_be_updated, identified_records)
 
         if commit_changes:
             self.execute_parent_updates_in_list(to_be_updated, securityMode=self.securityMode,
@@ -969,7 +1103,7 @@ ____________________\n""".format(i + 1, len(pending_changes)) + str(el[3]))
 
     @staticmethod
     def debug_build_usage_tree(converter: Converter):
-        res: Dict[str, Dict[str, Any]] = {
+        res: dict[str, dict[str, Any]] = {
             converter.name: {
                 "usage": ", ".join(converter.metadata["usage"]),
                 "subtree": {}
@@ -986,7 +1120,7 @@ ____________________\n""".format(i + 1, len(pending_changes)) + str(el[3]))
         return res
 
     def save_debug_data(self, filename: str):
-        paths: Dict[str, Union[dict, list]] = dict()
+        paths: dict[str, Union[dict, list]] = dict()
 
         def flatten_debug_info(key):
             mod_info = self.debug_metadata[key]
@@ -1011,11 +1145,11 @@ ____________________\n""".format(i + 1, len(pending_changes)) + str(el[3]))
         with open(filename, "w") as f:
             f.write(yaml.dump(paths, sort_keys=False))
 
-    def _crawl(self, items: List[StructureElement],
-               local_converters: List[Converter],
+    def _crawl(self, items: list[StructureElement],
+               local_converters: list[Converter],
                generalStore: GeneralStore,
                recordStore: RecordStore,
-               structure_elements_path: List[str], converters_path: List[str]):
+               structure_elements_path: list[str], converters_path: list[str]):
         """
         Crawl a list of StructureElements and apply any matching converters.
 
@@ -1080,7 +1214,7 @@ ____________________\n""".format(i + 1, len(pending_changes)) + str(el[3]))
         # to the general update container.
         scoped_records = recordStore.get_records_current_scope()
         for record in scoped_records:
-            self.target_data.append(record)
+            self.crawled_data.append(record)
 
         # TODO: the scoped variables should be cleaned up as soon if the variables
         #       are no longer in the current scope. This can be implemented as follows,
@@ -1093,7 +1227,7 @@ ____________________\n""".format(i + 1, len(pending_changes)) + str(el[3]))
         #     del recordStore[name]
         #     del generalStore[name]
 
-        return self.target_data
+        return self.crawled_data
 
 
 def crawler_main(crawled_directory_path: str,
@@ -1103,7 +1237,7 @@ def crawler_main(crawled_directory_path: str,
                  provenance_file: str = None,
                  dry_run: bool = False,
                  prefix: str = "",
-                 securityMode: int = SecurityMode.UPDATE,
+                 securityMode: SecurityMode = SecurityMode.UPDATE,
                  unique_names=True,
                  ):
     """
@@ -1155,7 +1289,7 @@ def crawler_main(crawled_directory_path: str,
                 "update": updates}))
     else:
         rtsfinder = dict()
-        for elem in crawler.target_data:
+        for elem in crawler.crawled_data:
             if isinstance(elem, db.File):
                 # correct the file path:
                 # elem.file = os.path.join(args.path, elem.file)
@@ -1205,6 +1339,9 @@ def parse_args():
     parser.add_argument("crawled_directory_path",
                         help="The subtree of files below the given path will "
                         "be considered. Use '/' for everything.")
+    parser.add_argument("-c", "--add-cwd-to-path", action="store_true",
+                        help="If given, the current working directory(cwd) is added to the Python "
+                        "path.")
     parser.add_argument("-s", "--security-mode", choices=["retrieve", "insert", "update"],
                         default="retrieve",
                         help="Determines whether entities may only be read from the server, or "
@@ -1238,6 +1375,8 @@ def main():
     else:
         logger.setLevel(logging.INFO)
 
+    if args.add_cwd_to_path:
+        sys.path.append(os.path.abspath("."))
     sys.exit(crawler_main(
         crawled_directory_path=args.crawled_directory_path,
         cfood_file_name=args.cfood_file_name,
diff --git a/src/caoscrawler/identifiable.py b/src/caoscrawler/identifiable.py
new file mode 100644
index 0000000000000000000000000000000000000000..7ff7172576be08e068ba412f319b059fb349bbeb
--- /dev/null
+++ b/src/caoscrawler/identifiable.py
@@ -0,0 +1,141 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2022 Henrik tom Wörden
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+
+from __future__ import annotations
+import caosdb as db
+from datetime import datetime
+import json
+from hashlib import sha256
+from typing import Union
+
+
+class Identifiable():
+    """
+    The fingerprint of a Record in CaosDB.
+
+    This class contains the information that is used by the CaosDB Crawler to identify Records.
+    On the one hand, this can be the ID of a Record or the path of a File.
+    On the other hand, in order to check whether a Record exists in the CaosDB Server, a query can
+    be created using the information contained in the Identifiable.
+
+    Parameters
+    ----------
+    record_type: str, this RecordType has to be a parent of the identified object
+    name: str, the name of the identified object
+    properties: dict, keys are names of Properties; values are Property values
+                Note that lists are not checked for equality but are interpreted as multiple
+                conditions for a single Property.
+    path: str, in case of Files, the path where the file is stored.
+    backrefs: list, TODO future
+    """
+
+    def __init__(self, record_id: int = None, path: str = None, record_type: str = None,
+                 name: str = None, properties: dict = None,
+                 backrefs: list[Union[int, str]] = None):
+        if (record_id is None and path is None and name is None
+                and (backrefs is None or len(backrefs) == 0)
+                and (properties is None or len(properties) == 0)):
+            raise ValueError("There is no identifying information. You need to add a path or "
+                             "properties or other identifying attributes.")
+        if properties is not None and 'name' in [k.lower() for k in properties.keys()]:
+            raise ValueError("Please use the separete 'name' keyword instead of the properties "
+                             "dict for name")
+        self.record_id = record_id
+        self.path = path
+        self.record_type = record_type
+        self.name = name
+        self.properties: dict = {}
+        if properties is not None:
+            self.properties = properties
+        self.backrefs: list[Union[int, db.Entity]] = []
+        if backrefs is not None:
+            self.backrefs = backrefs
+
+    def get_representation(self) -> str:
+        return sha256(Identifiable._create_hashable_string(self).encode('utf-8')).hexdigest()
+
+    @staticmethod
+    def _value_representation(value) -> str:
+        """returns the string representation of property values to be used in the hash function
+
+        The string is the path of a File Entity, the CaosDB ID or Python ID of other Entities
+        (Python ID only if there is no CaosDB ID) and the string representation of bool, float, int
+        and str.
+        """
+
+        if value is None:
+            return "None"
+        elif isinstance(value, db.File):
+            return str(value.path)
+        elif isinstance(value, db.Entity):
+            if value.id is not None:
+                return str(value.id)
+            else:
+                return "PyID=" + str(id(value))
+        elif isinstance(value, list):
+            return "[" + ", ".join([Identifiable._value_representation(el) for el in value]) + "]"
+        elif (isinstance(value, str) or isinstance(value, int) or isinstance(value, float)
+              or isinstance(value, datetime)):
+            return str(value)
+        else:
+            raise ValueError(f"Unknown datatype of the value: {value}")
+
+    @staticmethod
+    def _create_hashable_string(identifiable: Identifiable) -> str:
+        """
+        Creates a string from the attributes of an identifiable that can be hashed.
+        The string has the form "P<parent>N<name>R<reference-ids>" followed by the sorted
+        "name:value" pairs of the properties, e.g. "P<RT>N<rec>R<[]>a:5b:10" for a=5, b=10.
+        """
+        rec_string = "P<{}>N<{}>R<{}>".format(
+            identifiable.record_type,
+            identifiable.name,
+            [Identifiable._value_representation(el) for el in identifiable.backrefs])
+        # TODO this structure neglects Properties if multiple exist for the same name
+        for pname in sorted(identifiable.properties.keys()):
+            rec_string += ("{}:".format(pname) +
+                           Identifiable._value_representation(identifiable.properties[pname]))
+        return rec_string
+
+    def __eq__(self, other) -> bool:
+        """
+        Identifiables are equal if they belong to the same Record. Since ID and path are on their
+        own enough to identify the Record, it is sufficient if those attributes are equal.
+        1. both IDs are set (not None)  -> equal if IDs are equal
+        2. both paths are set (not None)  -> equal if paths are equal
+        3. equal if attribute representations are equal
+        """
+        if not isinstance(other, Identifiable):
+            raise ValueError("Identifiable can only be compared to other Identifiable objects.")
+        elif self.record_id is not None and other.record_id is not None:
+            return self.record_id == other.record_id
+        elif self.path is not None and other.path is not None:
+            return self.path == other.path
+        elif self.get_representation() == other.get_representation():
+            return True
+        else:
+            return False
+
+    def __repr__(self):
+        pstring = json.dumps(self.properties)
+        return (f"{self.__class__.__name__} for RT {self.record_type}: id={self.record_id}; "
+                f"name={self.name}\n\tpath={self.path}\n"
+                f"\tproperties:\n{pstring}\n"
+                f"\tbackrefs:\n{self.backrefs}")
diff --git a/src/caoscrawler/identifiable_adapters.py b/src/caoscrawler/identifiable_adapters.py
index d4c2b1d04316946dc28fec15489e0dc390cb9dd3..653951f404f2ccbd71f6ff998e6713a4f449dade 100644
--- a/src/caoscrawler/identifiable_adapters.py
+++ b/src/caoscrawler/identifiable_adapters.py
@@ -4,8 +4,8 @@
 # ** header v3.0
 # This file is a part of the CaosDB Project.
 #
-# Copyright (C) 2021 Henrik tom Wörden
-#               2021 Alexander Schlemmer
+# Copyright (C) 2021-2022 Henrik tom Wörden
+#               2021-2022 Alexander Schlemmer
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as
@@ -23,9 +23,11 @@
 # ** end header
 #
 
+from __future__ import annotations
 import yaml
 
 from datetime import datetime
+from .identifiable import Identifiable
 import caosdb as db
 import logging
 from abc import abstractmethod, ABCMeta
@@ -86,7 +88,7 @@ class IdentifiableAdapter(metaclass=ABCMeta):
     """
 
     @staticmethod
-    def create_query_for_identifiable(ident: db.Record):
+    def create_query_for_identifiable(ident: Identifiable):
         """
         This function is taken from the old crawler:
         caosdb-advanced-user-tools/src/caosadvancedtools/crawler.py
@@ -95,34 +97,34 @@ class IdentifiableAdapter(metaclass=ABCMeta):
         whether the required record already exists.
         """
 
-        if len(ident.parents) != 1:
-            raise RuntimeError(
-                "Multiple parents for identifiables not supported.")
+        query_string = "FIND Record "
+        if ident.record_type is not None:
+            query_string += ident.record_type
+        for ref in ident.backrefs:
+            eid = ref
+            if isinstance(ref, db.Entity):
+                eid = ref.id
+            query_string += (" WHICH IS REFERENCED BY " + str(eid) + " AND")
 
-        query_string = "FIND Record " + ident.get_parents()[0].name
         query_string += " WITH "
 
-        if ident.name is None and len(ident.get_properties()) == 0:
-            raise ValueError(
-                "The identifiable must have features to identify it.")
-
         if ident.name is not None:
             query_string += "name='{}'".format(ident.name)
-            if len(ident.get_properties()) > 0:
+            if len(ident.properties) > 0:
                 query_string += " AND "
 
         query_string += IdentifiableAdapter.create_property_query(ident)
         return query_string
 
     @staticmethod
-    def create_property_query(entity: db.Entity):
+    def create_property_query(entity: Identifiable):
         query_string = ""
-        for p in entity.get_properties():
-            if p.value is None:
-                query_string += "'" + p.name + "' IS NULL AND "
-            elif isinstance(p.value, list):
-                for v in p.value:
-                    query_string += ("'" + p.name + "'='" +
+        for pname, pvalue in entity.properties.items():
+            if pvalue is None:
+                query_string += "'" + pname + "' IS NULL AND "
+            elif isinstance(pvalue, list):
+                for v in pvalue:
+                    query_string += ("'" + pname + "'='" +
                                      convert_value(v) + "' AND ")
 
             # TODO: (for review)
@@ -136,8 +138,8 @@ class IdentifiableAdapter(metaclass=ABCMeta):
             #                      IdentifiableAdapter.create_property_query(p.value) +
             #                      ") AND ")
             else:
-                query_string += ("'" + p.name + "'='" +
-                                 convert_value(p.value) + "' AND ")
+                query_string += ("'" + pname + "'='" +
+                                 convert_value(pvalue) + "' AND ")
         # remove the last AND
         return query_string[:-4]
 
@@ -160,87 +162,90 @@ class IdentifiableAdapter(metaclass=ABCMeta):
         """
         pass
 
-    def get_identifiable_for_file(self, record: db.File):
-        """
-        Retrieve an identifiable for a file.
-
-        Currently an identifiable for a file ist just a File object
-        with a specific path. In the future, this could be extended
-        to allow for names, parents and custom properties.
-        """
-        identifiable = db.File()
-        identifiable.path = record.path
-        return identifiable
-
-    def get_identifiable(self, record: db.Record):
+    def get_identifiable(self, record: db.Record, referencing_entities=None):
         """
         Retrieve the registered identifiable and fill the property values to create an
         identifiable
-        """
 
-        if record.role == "File":
-            return self.get_identifiable_for_file(record)
+        Args:
+            record: the record for which the Identifiable shall be created.
+            referencing_entities: a dictionary (Type: dict[int, dict[str, list[db.Entity]]]) that
+              allows looking up entities with a certain RecordType that reference ``record``
+
+        Returns:
+            Identifiable, the identifiable for record.
+        """
 
         registered_identifiable = self.get_registered_identifiable(record)
 
-        if registered_identifiable is None:
-            return None
+        if referencing_entities is None:
+            referencing_entities = {}
 
-        identifiable = db.Record(name=record.name)
-        if len(registered_identifiable.parents) != 1:
-            raise RuntimeError("Multiple parents for identifiables"
-                               "not supported.")
-        identifiable.add_parent(registered_identifiable.parents[0])
         property_name_list_A = []
         property_name_list_B = []
-
-        # fill the values:
-        for prop in registered_identifiable.properties:
-            if prop.name == "name":
-                # The name can be an identifiable, but it isn't a property
-                continue
-            # problem: what happens with multi properties?
-            # case A: in the registered identifiable
-            # case B: in the identifiable
-
-            record_prop = record.get_property(prop.name)
-            if record_prop is None:
-                # TODO: how to handle missing values in identifiables
-                #       raise an exception?
-                raise NotImplementedError(
-                    f"RECORD\n{record}\nPROPERTY\n{prop.name}"
-                )
-            newval = record_prop.value
-            if isinstance(record_prop.value, db.Entity):
-                newval = self.resolve_reference(record_prop.value)
-            elif isinstance(record_prop.value, list):
-                newval = list()
-                for element in record_prop.value:
-                    if isinstance(element, db.Entity):
-                        newval.append(self.resolve_reference(element))
-                    else:
-                        newval.append(element)
-            record_prop_new = db.Property(name=record_prop.name,
-                                          id=record_prop.id,
-                                          description=record_prop.description,
-                                          datatype=record_prop.datatype,
-                                          value=newval,
-                                          unit=record_prop.unit)
-            identifiable.add_property(record_prop_new)
-            property_name_list_A.append(prop.name)
-
-        # check for multi properties in the record:
-        for prop in property_name_list_A:
-            property_name_list_B.append(prop)
-        if (len(set(property_name_list_B)) != len(property_name_list_B) or len(
-                set(property_name_list_A)) != len(property_name_list_A)):
-            raise RuntimeError(
-                "Multi properties used in identifiables can cause unpredictable results.")
-
-        return identifiable
+        identifiable_props = {}
+        identifiable_backrefs = []
+
+        if registered_identifiable is not None:
+            # fill the values:
+            for prop in registered_identifiable.properties:
+                if prop.name == "name":
+                    # The name can be an identifiable, but it isn't a property
+                    continue
+                # problem: what happens with multi properties?
+                # case A: in the registered identifiable
+                # case B: in the identifiable
+
+                # TODO: similar to the Identifiable class, Registered Identifiable should be a
+                # separate class too
+                if prop.name.lower() == "is_referenced_by":
+                    for rtname in prop.value:
+                        if (id(record) in referencing_entities
+                                and rtname in referencing_entities[id(record)]):
+                            identifiable_backrefs.extend(referencing_entities[id(record)][rtname])
+                        else:
+                            # TODO: is this the appropriate error?
+                            raise NotImplementedError(
+                                f"The following record is missing an identifying property:"
+                                f"RECORD\n{record}\nIdentifying PROPERTY\n{prop.name}"
+                            )
+                    continue
+
+                record_prop = record.get_property(prop.name)
+                if record_prop is None:
+                    # TODO: how to handle missing values in identifiables
+                    #       raise an exception?
+                    # TODO: is this the appropriate error?
+                    raise NotImplementedError(
+                        f"The following record is missing an identifying property:"
+                        f"RECORD\n{record}\nIdentifying PROPERTY\n{prop.name}"
+                    )
+                identifiable_props[record_prop.name] = record_prop.value
+                property_name_list_A.append(prop.name)
+
+            # check for multi properties in the record:
+            for prop in property_name_list_A:
+                property_name_list_B.append(prop)
+            if (len(set(property_name_list_B)) != len(property_name_list_B) or len(
+                    set(property_name_list_A)) != len(property_name_list_A)):
+                raise RuntimeError(
+                    "Multi properties used in identifiables could cause unpredictable results and "
+                    "are not allowed. You might want to consider a Property with a list as value.")
+
+        # Use the RecordType of the registered Identifiable if it exists.
+        # We do not use the parents of the Record because it might have multiple.
+        return Identifiable(
+            record_id=record.id,
+            record_type=(registered_identifiable.parents[0].name
+                         if registered_identifiable else None),
+            name=record.name,
+            properties=identifiable_props,
+            path=record.path,
+            backrefs=identifiable_backrefs
+        )
 
     @abstractmethod
-    def retrieve_identified_record_for_identifiable(self, identifiable: db.Record):
+    def retrieve_identified_record_for_identifiable(self, identifiable: Identifiable):
         """
         Retrieve identifiable record for a given identifiable.
 
@@ -253,7 +258,7 @@ class IdentifiableAdapter(metaclass=ABCMeta):
 
     # TODO: remove side effect
     # TODO: use ID if record has one?
-    def retrieve_identified_record_for_record(self, record: db.Record):
+    def retrieve_identified_record_for_record(self, record: db.Record, referencing_entities=None):
         """
         This function combines all functionality of the IdentifierAdapter by
         returning the identifiable after having checked for an appropriate
@@ -262,12 +267,9 @@ class IdentifiableAdapter(metaclass=ABCMeta):
         In case there was no appropriate registered identifiable or no identifiable could
         be found return value is None.
         """
-        identifiable = self.get_identifiable(record)
-
-        if identifiable is None:
-            return None
+        identifiable = self.get_identifiable(record, referencing_entities=referencing_entities)
 
-        if identifiable.role == "File":
+        if identifiable.path is not None:
             return self.get_file(identifiable)
 
         return self.retrieve_identified_record_for_identifiable(identifiable)
@@ -288,7 +290,7 @@ class LocalStorageIdentifiableAdapter(IdentifiableAdapter):
     def get_records(self):
         return self._records
 
-    def get_file(self, identifiable: db.File):
+    def get_file(self, identifiable: Identifiable):
         """
         Just look in records for a file with the same path.
         """
@@ -346,7 +348,7 @@ class LocalStorageIdentifiableAdapter(IdentifiableAdapter):
             return None
         return identifiable_candidates[0]
 
-    def check_record(self, record: db.Record, identifiable: db.Record):
+    def check_record(self, record: db.Record, identifiable: Identifiable):
         """
         Check for a record from the local storage (named "record") if it is
         the identified record for an identifiable which was created by
@@ -356,13 +358,11 @@ class LocalStorageIdentifiableAdapter(IdentifiableAdapter):
         record is the record from the local database to check against.
         identifiable is the record that was created during the crawler run.
         """
-        if len(identifiable.parents) != 1:
-            raise RuntimeError(
-                "Multiple parents for identifiables not supported.")
-        if not has_parent(record, identifiable.parents[0].name):
+        if (identifiable.record_type is not None
+                and not has_parent(record, identifiable.record_type)):
             return False
-        for prop in identifiable.properties:
-            prop_record = record.get_property(prop.name)
+        for propname, propvalue in identifiable.properties.items():
+            prop_record = record.get_property(propname)
             if prop_record is None:
                 return False
 
@@ -370,21 +370,18 @@ class LocalStorageIdentifiableAdapter(IdentifiableAdapter):
             # there are two different cases:
             # a) prop_record.value has a registered identifiable:
             #      in this case, fetch the identifiable and set the value accordingly
-            if isinstance(prop.value, db.Entity):  # lists are not checked here
-                registered = self.get_registered_identifiable(prop.value)
-
-                if registered is None:
-                    raise NotImplementedError("Non-identifiable references cannot"
-                                              " be used as properties in identifiables.")
-
-                raise RuntimeError("The identifiable which is used as property"
-                                   " here has to be inserted first.")
-
-            if prop.value != prop_record.value:
+            if isinstance(propvalue, db.Entity):  # lists are not checked here
+                otherid = prop_record.value
+                if isinstance(prop_record.value, db.Entity):
+                    otherid = prop_record.value.id
+                if propvalue.id != otherid:
+                    return False
+
+            elif propvalue != prop_record.value:
                 return False
         return True
 
-    def retrieve_identified_record_for_identifiable(self, identifiable: db.Record):
+    def retrieve_identified_record_for_identifiable(self, identifiable: Identifiable):
         candidates = []
         for record in self._records:
             if self.check_record(record, identifiable):
@@ -431,13 +428,20 @@ class CaosDBIdentifiableAdapter(IdentifiableAdapter):
         for key, value in identifiable_data.items():
             rt = db.RecordType().add_parent(key)
             for prop_name in value:
-                rt.add_property(name=prop_name)
+                if isinstance(prop_name, str):
+                    rt.add_property(name=prop_name)
+                elif isinstance(prop_name, dict):
+                    for k, v in prop_name.items():
+                        rt.add_property(name=k, value=v)
+                else:
+                    raise NotImplementedError("YAML is not structured correctly")
+
             self.register_identifiable(key, rt)
 
     def register_identifiable(self, name: str, definition: db.RecordType):
         self._registered_identifiables[name] = definition
 
-    def get_file(self, identifiable: db.File):
+    def get_file(self, identifiable: Identifiable):
         if identifiable.path is None:
             raise RuntimeError("Path must not be None for File retrieval.")
         candidates = db.execute_query("FIND File which is stored at {}".format(
@@ -455,6 +459,9 @@ class CaosDBIdentifiableAdapter(IdentifiableAdapter):
         It is assumed, that there is exactly one identifiable for each RecordType. Only the first
         parent of the given Record is considered; others are ignored
         """
+        if len(record.parents) == 0:
+            return None
+        # TODO We need to treat the case where multiple parents exist properly.
         rt_name = record.parents[0].name
         for name, definition in self._registered_identifiables.items():
             if definition.parents[0].name.lower() == rt_name.lower():
@@ -469,12 +476,14 @@ class CaosDBIdentifiableAdapter(IdentifiableAdapter):
             return record
         return record.id
 
-    def retrieve_identified_record_for_identifiable(self, identifiable: db.Record):
+    def retrieve_identified_record_for_identifiable(self, identifiable: Identifiable):
         query_string = self.create_query_for_identifiable(identifiable)
         candidates = db.execute_query(query_string)
         if len(candidates) > 1:
             raise RuntimeError(
-                f"Identifiable was not defined unambigiously.\n{query_string}\nReturned the following {candidates}.")
+                f"Identifiable was not defined unambigiously.\n{query_string}\nReturned the "
+                f"following {candidates}."
+                f"Identifiable:\n{identifiable.record_type}{identifiable.properties}")
         if len(candidates) == 0:
             return None
         return candidates[0]
diff --git a/src/caoscrawler/identified_cache.py b/src/caoscrawler/identified_cache.py
index 0b9d7a47bdecc4094edb1296f4c04dfa083a2436..aa2d82f8e66c738e737c62f3cc68eaf60127e28b 100644
--- a/src/caoscrawler/identified_cache.py
+++ b/src/caoscrawler/identified_cache.py
@@ -23,67 +23,43 @@
 # ** end header
 #
 
+
 """
-stores identified records and is able to detect duplicates
+see class docstring
 """
 
+from .identifiable import Identifiable
 import caosdb as db
 
-from hashlib import sha256
-
 
-def _create_hashable_string(identifiable: db.Record):
-    """
-    creates a string from the attributes of an identifiable that can be hashed
+class IdentifiedCache(object):
     """
-    if identifiable.role == "File":
-        # Special treatment for files:
-        return "P<>N<>{}:{}".format("path", identifiable.path)
-    if len(identifiable.parents) != 1:
-        # TODO: extend this
-        # maybe something like this:
-        # parent_names = ",".join(
-        #   sorted([p.name for p in identifiable.parents])
-        raise RuntimeError("Cache entry can only be generated for entities with 1 parent.")
-    rec_string = "P<{}>N<{}>".format(identifiable.parents[0].name, identifiable.name)
-    for pname in sorted([p.name for p in identifiable.properties]):
-        value = str(identifiable.get_property(pname).value)
-
-        # TODO: (for review)
-        #       This expansion of the hash function was introduced recently
-        #       to allow the special case of Files as values of properties.
-        #       We need to review the completeness of all the cases here, as the cache
-        #       is crucial for correct identification of insertion and updates.
-        if isinstance(identifiable.get_property(pname).value, db.File):
-            value = str(identifiable.get_property(pname).value.path)
-        elif isinstance(identifiable.get_property(pname).value, db.Entity):
-            value = str(identifiable.get_property(pname).value.id)
-        elif isinstance(identifiable.get_property(pname).value, list):
-            tmplist = []
-            for val in identifiable.get_property(pname).value:
-                if isinstance(val, db.Entity):
-                    tmplist.append(val.id)
-                else:
-                    tmplist.append(val)
-            value = str(tmplist)
-
-        rec_string += "{}:".format(pname) + value
-    return rec_string
-
-
-def _create_hash(identifiable: db.Record) -> str:
-    return sha256(_create_hashable_string(identifiable).encode('utf-8')).hexdigest()
+    This class is like a dictionary where the keys are Identifiables. When you check whether an
+    Identifiable exists as a key, this class returns True not only if that exact Python object is
+    used as a key, but also if an Identifiable is used as a key that is **equal** to the one being
+    considered (see the __eq__ function of Identifiable). Similarly, `cache[identifiable]`
+    returns the Record whose key is an Identifiable that is equal to the one in the square
+    brackets.
 
+    This class is used for Records whose existence in a remote server was checked using
+    identifiables. If the Record was found, this means that we identified the corresponding Record
+    in the remote server and the ID of the local object can be set.
+    To prevent querying the server again and again for the same objects, this cache allows storing
+    Records that were found on a remote server and those that were not (typically in separate
+    caches).
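+
+    A minimal usage sketch (``remote_record`` and ``ident`` are illustrative names)::
+
+        cache = IdentifiedCache()
+        cache.add(record=remote_record, identifiable=ident)
+        # Any Identifiable that is equal to ``ident`` now finds the cached Record:
+        assert ident in cache
+        assert cache[ident] is remote_record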
+    """
 
-class IdentifiedCache(object):
     def __init__(self):
         self._cache = {}
+        self._identifiables = []
 
-    def __contains__(self, identifiable: db.Record):
-        return _create_hash(identifiable) in self._cache
+    def __contains__(self, identifiable: Identifiable):
+        return identifiable in self._identifiables
 
-    def __getitem__(self, identifiable: db.Record):
+    def __getitem__(self, identifiable: Identifiable):
-        return self._cache[_create_hash(identifiable)]
+        index = self._identifiables.index(identifiable)
+        return self._cache[id(self._identifiables[index])]
 
-    def add(self, record: db.Record, identifiable: db.Record):
-        self._cache[_create_hash(identifiable)] = record
+    def add(self, record: db.Record, identifiable: Identifiable):
+        self._cache[id(identifiable)] = record
+        self._identifiables.append(identifiable)
diff --git a/src/caoscrawler/structure_elements.py b/src/caoscrawler/structure_elements.py
index 01996b4ff3e14a9739857e6e03ceca161300b37e..cb5fad211b5e3b1b766ee95fd6f0a31c965d032b 100644
--- a/src/caoscrawler/structure_elements.py
+++ b/src/caoscrawler/structure_elements.py
@@ -23,7 +23,8 @@
 # ** end header
 #
 
-from typing import Dict
+from typing import Dict as tDict
+import warnings
 
 
 class StructureElement(object):
@@ -31,7 +32,7 @@ class StructureElement(object):
 
     def __init__(self, name):
         # Used to store usage information for debugging:
-        self.metadata: Dict[str, set[str]] = {
+        self.metadata: tDict[str, set[str]] = {
             "usage": set()
         }
 
@@ -68,48 +69,78 @@ class JSONFile(File):
 
 
 class DictElement(StructureElement):
-    def __init__(self, name: str, value):
+    def __init__(self, name: str, value: dict):
         super().__init__(name)
         self.value = value
 
 
-class Dict(StructureElement):
-    def __init__(self, name: str, value: dict):
+class TextElement(StructureElement):
+    def __init__(self, name: str, value: str):
         super().__init__(name)
         self.value = value
 
 
-class DictTextElement(DictElement):
-    def __init__(self, name: str, value: str):
-        super().__init__(name, value)
+class DictTextElement(TextElement):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning("This class is depricated. Please use TextElement."))
+        super().__init__(*args, **kwargs)
 
 
-class DictIntegerElement(DictElement):
+class IntegerElement(StructureElement):
     def __init__(self, name: str, value: int):
-        super().__init__(name, value)
+        super().__init__(name)
+        self.value = value
 
 
-class DictBooleanElement(DictElement):
+class DictIntegerElement(IntegerElement):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning("This class is depricated. Please use IntegerElement."))
+        super().__init__(*args, **kwargs)
+
+
+class BooleanElement(StructureElement):
     def __init__(self, name: str, value: bool):
-        super().__init__(name, value)
+        super().__init__(name)
+        self.value = value
 
 
-class DictDictElement(Dict, DictElement):
-    def __init__(self, name: str, value: dict):
-        DictElement.__init__(self, name, value)
+class DictBooleanElement(BooleanElement):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning("This class is depricated. Please use BooleanElement."))
+        super().__init__(*args, **kwargs)
 
 
-class DictListElement(DictElement):
-    def __init__(self, name: str, value: dict):
-        super().__init__(name, value)
+class ListElement(StructureElement):
+    def __init__(self, name: str, value: list):
+        super().__init__(name)
+        self.value = value
 
 
-class DictFloatElement(DictElement):
-    def __init__(self, name: str, value: float):
-        super().__init__(name, value)
+class DictListElement(ListElement):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning("This class is depricated. Please use ListElement."))
+        super().__init__(*args, **kwargs)
 
 
-class TextElement(StructureElement):
-    def __init__(self, name: str, value: str):
+class FloatElement(StructureElement):
+    def __init__(self, name: str, value: float):
         super().__init__(name)
         self.value = value
+
+
+class DictFloatElement(FloatElement):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning("This class is depricated. Please use FloatElement."))
+        super().__init__(*args, **kwargs)
+
+
+class Dict(DictElement):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning("This class is depricated. Please use DictElement."))
+        super().__init__(*args, **kwargs)
+
+
+class DictDictElement(DictElement):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning("This class is depricated. Please use DictElement."))
+        super().__init__(*args, **kwargs)
diff --git a/src/doc/README_SETUP.md b/src/doc/README_SETUP.md
index b6995c9a2d950ecd1e832d5b49dac9ed88a7e455..1f6e15d408e10e38bce0d9b9fe9b6197ec69bfc3 100644
--- a/src/doc/README_SETUP.md
+++ b/src/doc/README_SETUP.md
@@ -2,9 +2,6 @@
 
 ## Installation ##
 
-### Requirements ###
-
-
 ### How to install ###
 
 #### Linux ####
@@ -59,17 +56,12 @@ pip3 install --user .
 
 **Note**: In the near future, this package will also be made available on PyPi.
 
-## Configuration ##
-
-
-
-## Try it out ##
-
-
 
 ## Run Unit Tests
 
 ## Documentation ##
+We use Sphinx to create the documentation. Docstrings in the code should comply
+with the Google style (see links below).
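+
+A Google-style docstring might look like this (a minimal sketch; the function
+itself is made up for illustration):
+
+```python
+def add(a: int, b: int) -> int:
+    """Return the sum of two integers.
+
+    Args:
+        a: The first summand.
+        b: The second summand.
+
+    Returns:
+        The sum of ``a`` and ``b``.
+    """
+    return a + b
+```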
 
 Build documentation in `src/doc` with `make html`.
 
@@ -79,4 +71,10 @@ Build documentation in `src/doc` with `make html`.
 - `sphinx-autoapi`
 - `recommonmark`
 
-### Troubleshooting ###
+### How to contribute ###
+
+- [Google Style Python Docstrings](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html)
+- [Google Style Python Docstrings 2nd reference](https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings)
+- [References to other documentation](https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#role-external)
+
+
diff --git a/src/doc/concepts.rst b/src/doc/concepts.rst
index c0f21cbaa322caddabed8e045f7b6fc4253d2959..89757f21958f3d94649b33e9f9112593f703191d 100644
--- a/src/doc/concepts.rst
+++ b/src/doc/concepts.rst
@@ -11,7 +11,8 @@ of the existing data (For example could a tree of Python file objects
 (StructureElements) represent a file tree that exists on some file server).
 
 Relevant sources in:
-src/structure_elements.py
+
+- ``src/structure_elements.py``
 
 Converters
 ++++++++++
@@ -22,20 +23,70 @@ the above named tree. The definition of a Converter also contains what
 Converters shall be used to treat the generated child-StructureElements. The
 definition is therefore a tree itself.
 
-See `:doc:converters<converters>` for details.
+See :std:doc:`converters<converters>` for details.
 
 
 
 Relevant sources in:
-src/converters.py
+
+- ``src/converters.py``
 
 
 
 Identifiables
 +++++++++++++
 
-Relevant sources in:
-src/identifiable_adapters.py
+An Identifiable is like the fingerprint of a Record.
+
+The identifiable contains the information that is used by the CaosDB Crawler to identify Records.
+For example, in order to check whether a Record exists in the CaosDB Server, the CaosDB Crawler creates a query
+using the information contained in the Identifiable.
+
+Suppose a certain experiment is done at most once per day. Then the identifiable could
+consist of the RecordType "SomeExperiment" (as a parent) and the Property "date" with the respective value.
+
+You can think of the properties that are used by the identifiable as a dictionary. For each property
+name there can be one value. However, this value can be a list such that the created query can look
+like "FIND RECORD ParamenterSet WITH a=5 AND a=6". This is meaningful if there is a ParamenterSet
+with two Properties with the name 'a' (multi property) or if 'a' is a list containing at least the values 5 and 6.
+
+When we use a reference Property in the identifiable, we effectively use the reference from the object to
+be identified pointing to some other object as an identifying attribute. We can also use references that point
+in the other direction, i.e. towards the object to be identified. An identifiable may denote one or more
+Entities that are referencing the object to be identified.
+
+The path of a File object can serve as a Property that identifies files; similarly, the name of
+a Record can be used.
+
+In the current implementation, an identifiable can only use one RecordType, even though the identified Records might have multiple Parents.
+
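+A minimal sketch in Python (following the "SomeExperiment" example above; the date value
+is an assumption for illustration):
+
+.. code-block:: python
+
+   from caoscrawler.identifiable import Identifiable
+
+   # The fingerprint of the "SomeExperiment" that was done on 2022-01-01:
+   ident = Identifiable(record_type="SomeExperiment",
+                        properties={"date": "2022-01-01"})
+
+This identifiable would lead to a query like
+``FIND Record SomeExperiment WITH 'date'='2022-01-01'``.
+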
+Relevant sources in:
+
+- ``src/identifiable_adapters.py``
+- ``src/identifiable.py``
+
+Registered Identifiables
+++++++++++++++++++++++++
+
+A Registered Identifiable is the blueprint for Identifiables.
+You can think of registered identifiables as identifiables without concrete values for properties.
+Registered Identifiables are associated with RecordTypes and define what information an
+identifiable for that RecordType consists of. There can be multiple Registered Identifiables for
+one RecordType.
+
+If identifiables shall contain references to the object to be identified, the Registered
+Identifiable must list the RecordTypes of the Entities that have those references.
+For example, the Registered Identifiable for the "Experiment" RecordType may contain
+the "date" Property and "Project" as the RecordType of an Entity that is referencing
+the object to be identified. Then if we have a structure of some Records at hand,
+we can check whether a Record with the parent "Project" is referencing the "Experiment"
+Record. If that is the case, this reference is part of the identifiable for the "Experiment"
+Record. Note that if there are multiple Records with the appropriate parent (e.g.
+multiple "Project" Records in the above example) it will be required that all of them
+reference the object to be identified.
+
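+In Python, registering such an identifiable with the ``CaosDBIdentifiableAdapter`` could
+look like this (a sketch; the RecordType and Property names follow the example above):
+
+.. code-block:: python
+
+   import caosdb as db
+   from caoscrawler.identifiable_adapters import CaosDBIdentifiableAdapter
+
+   # Experiments are identified by their date and by being referenced by a
+   # "Project" Record:
+   adapter = CaosDBIdentifiableAdapter()
+   adapter.register_identifiable(
+       "Experiment",
+       db.RecordType()
+       .add_parent("Experiment")
+       .add_property(name="date")
+       .add_property(name="is_referenced_by", value=["Project"]))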
+
+Identified Records
+++++++++++++++++++
+
+An Identified Record is the Record on the remote server that was found for a given
+Identifiable, i.e. the remote counterpart of a crawled Record.
 
 The Crawler
 +++++++++++
@@ -45,7 +96,8 @@ The crawler can be considered the main program doing the synchronization in basi
 #. Compare the current state of the CaosDB instance with the set of CaosDB Entities created in step 1, taking into account the :ref:`registered identifiables<Identifiables>`. Insert or update entites accordingly.
 
 Relevant sources in:
-src/crawl.py
+
+- ``src/crawl.py``
 
 
 
diff --git a/src/doc/conf.py b/src/doc/conf.py
index 30ce670eb8685e9701eeeb59bf22451a21fb16b9..b2873c846e7275b2a5bfbc8bc5cd18dabaa843ef 100644
--- a/src/doc/conf.py
+++ b/src/doc/conf.py
@@ -33,10 +33,10 @@ copyright = '2021, MPIDS'
 author = 'Alexander Schlemmer'
 
 # The short X.Y version
-version = '0.1'
+version = '0.2.1'
 # The full version, including alpha/beta/rc tags
 # release = '0.5.2-rc2'
-release = '0.1'
+release = '0.2.1-dev'
 
 
 # -- General configuration ---------------------------------------------------
diff --git a/src/doc/converters.rst b/src/doc/converters.rst
index 7ec93535ec41dc211e2fa7ee194b2ecbe1a659fb..ae84644072ebbd53f1325d1f9d1d0ef8e5dc6de6 100644
--- a/src/doc/converters.rst
+++ b/src/doc/converters.rst
@@ -15,8 +15,6 @@ Converters may define additional functions that create further values. For
 example, a regular expression could be used to get a date from a file name.
 
 
-
-
 A converter is defined via a yml file or part of it. The definition states
 what kind of StructureElement it treats (typically one).
 Also, it defines how children of the current StructureElement are
@@ -64,25 +62,57 @@ Standard Converters
 
 Directory Converter
 ===================
+The Directory Converter creates StructureElements for each File and Directory
+inside the current Directory. You can match a regular expression against the
+directory name using the 'match' key.
 
 Simple File Converter
 =====================
+The Simple File Converter does not create any children and is usually used when
+a file shall be used as it is, inserted, and referenced by other entities.
 
 Markdown File Converter
 =======================
+Reads a YAML header from Markdown files (if such a header exists) and creates
+child elements according to the structure of the header.
 
-Dict Converter
+DictElement Converter
-==============
+=====================
+Creates a child StructureElement for each key in the dictionary.
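+
+For example, a dictionary ``{"a": 5, "b": "text"}`` would yield the children
+``IntegerElement("a", 5)`` and ``TextElement("b", "text")``.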
 
 Typical Subtree converters
 --------------------------
+The following StructureElements are typically created:
+
+- BooleanElement
+- FloatElement
+- TextElement
+- IntegerElement
+- ListElement
+- DictElement
+
+Scalar Value Converters
+=======================
+`BooleanElementConverter`, `FloatElementConverter`, `TextElementConverter`, and
+`IntegerElementConverter` behave very similarly.
+
+These converters expect `match_name` and `match_value` in their definition,
+which allow matching the key and the value, respectively.
+
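+A typical definition could look like this (a sketch; the key name "temperature"
+and the record assignment are assumptions for illustration):
+
+.. code-block:: yaml
+
+  temperature:
+    type: FloatElement
+    match_name: ^temperature$
+    match_value: (?P<temp>.*)
+    records:
+      Measurement:
+        temperature: $temp
+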
+Note that there are defaults for accepting other types. For example,
+FloatElementConverter also accepts IntegerElements. The default
+behavior can be adjusted with the fields `accept_text`, `accept_int`,
+`accept_float`, and `accept_bool`.
+
+The following denotes what kind of StructureElements are accepted by default
+(they are defined in `src/caoscrawler/converters.py`):
 
-DictBooleanElementConverter
-DictFloatElementConverter
-DictTextElementConverter
-DictIntegerElementConverter
-DictListElementConverter
-DictDictElementConverter
+- BooleanElementConverter: bool, int
+- FloatElementConverter: int, float
+- TextElementConverter: text, bool, int, float
+- IntegerElementConverter: int
+- ListElementConverter: list
+- DictElementConverter: dict
 
 YAMLFileConverter
 =================
@@ -98,8 +128,6 @@ JSONFileConverter
 
 
 
-TextElementConverter
-====================
 
 TableConverter
 ==============
@@ -208,7 +236,7 @@ Now we need to create a class called "SourceResolver" in the file "sources.py".
 Furthermore we will customize the method :py:meth:`~caoscrawler.converters.Converter.create_records` that allows us to specify a more complex record generation procedure than provided in the standard implementation. One specific limitation of the standard implementation is, that only a fixed
 number of records can be generated by the yaml definition. So for any applications - like here - that require an arbitrary number of records to be created, a customized implementation of :py:meth:`~caoscrawler.converters.Converter.create_records` is recommended.
 In this context it is recommended to make use of the function :func:`caoscrawler.converters.create_records` that implements creation of record objects from python dictionaries of the same structure
-that would be given using a yaml definition.
+that would be given using a yaml definition (see next section below).
      
 .. code-block:: python
 
@@ -307,3 +335,151 @@ The following yaml block will register the converter in a yaml file:
      SourceResolver:
        package: scifolder.converters.sources
        converter: SourceResolver
+
+       
+Using the `create_records` API function
+=======================================
+
+The function :func:`caoscrawler.converters.create_records` was already mentioned above and it is
+the recommended way to create new records from custom converters. Let's have a look at the
+function signature:
+
+.. code-block:: python
+
+    def create_records(values: GeneralStore,  # <- pass the current variables store here
+                       records: RecordStore,  # <- pass the current store of CaosDB records here
+                       def_records: dict):    # <- This is the actual definition of new records!
+
+
+`def_records` is the actual definition of new records according to the yaml cfood specification
+(work in progress, in the docs). Essentially you can do everything here that you could do
+in the yaml document as well, but using Python source code.
+
+Let's have a look at a few examples:
+
+.. code-block:: yaml
+
+  DirConverter:
+    type: Directory
+    match: (?P<dir_name>.*)
+    records:
+      Experiment:
+        identifier: $dir_name
+
+This block will just create a new record with parent `Experiment` and one property
+`identifier` with a value derived from the matching regular expression.
+
+Let's formulate that using `create_records`:
+
+.. code-block:: python
+
+  dir_name = "directory name"
+  
+  record_def = {
+    "Experiment": {
+      "identifier": dir_name
+      }
+  }
+
+  keys_modified = create_records(values, records,
+                                 record_def)
+
+The `dir_name` is set explicitly here; everything else is identical to the yaml statements.
+
+
+The role of `keys_modified`
+===========================
+
+You have probably already noticed that :func:`caoscrawler.converters.create_records` returns
+`keys_modified`, which is a list of tuples. Each element of `keys_modified` has two elements:
+
+- Element 0 is the name of the record that is modified (as used in the record store `records`).
+- Element 1 is the name of the property that is modified.
+
+It is important that the correct list of modified keys is returned by
+:py:meth:`~caoscrawler.converters.Converter.create_records` to make the crawler process work.
+
+So, a sketch of a typical implementation within a custom converter could look like this:
+
+
+.. code-block:: python
+
+  def create_records(self, values: GeneralStore,
+                     records: RecordStore,
+                     element: StructureElement,
+                     file_path_prefix: str):
+
+      # Modify some records:
+      record_def = {
+          # ...
+      }
+
+      keys_modified = create_records(values, records,
+                                     record_def)
+
+      # You can of course do it multiple times:
+      keys_modified.extend(create_records(values, records,
+                                          record_def))
+
+      # You can also process the records section of the yaml definition:
+      keys_modified.extend(
+          super().create_records(values, records, element, file_path_prefix))
+      # This essentially allows users of your converter to customize the creation of records
+      # by providing a custom "records" section additionally to the modifications provided
+      # in this implementation of the Converter.
+
+      # Important: Return the list of modified keys!
+      return keys_modified
+
+
+More complex example
+====================
+
+Let's have a look at a more complex example, defining multiple records:
+
+.. code-block:: yaml
+
+  DirConverter:
+    type: Directory
+    match: (?P<dir_name>.*)
+    records:
+      Project:
+        identifier: project_name
+      Experiment:
+        identifier: $dir_name
+        Project: $Project
+      ProjectGroup:
+        projects: +$Project
+      
+
+This block will create two new Records:
+
+- A project with a constant identifier
+- An experiment with an identifier, derived from a regular expression and a reference to the new project.
+
+Furthermore a Record `ProjectGroup` will be edited (its initial definition is not given in the
+yaml block): The project that was just created will be added as a list element to the property
+`projects`.
+
+Let's formulate that using `create_records` (again, `dir_name` is constant here):
+
+.. code-block:: python
+
+  dir_name = "directory name"
+  
+  record_def = {
+    "Project": {
+      "identifier": "project_name",
+    }
+    "Experiment": {
+      "identifier": dir_name,
+      "Project": "$Project",
+      }
+    "ProjectGroup": {
+      "projects": "+$Project",
+    }
+    
+  }
+
+  keys_modified = create_records(values, records,
+                                 record_def)
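+
+In this case, `keys_modified` contains one tuple for each record/property pair that was
+set. A sketch of the expected contents (the exact order is an assumption):
+
+.. code-block:: python
+
+  # One tuple per modified property of each record:
+  expected = [("Project", "identifier"),
+              ("Experiment", "identifier"),
+              ("Experiment", "Project"),
+              ("ProjectGroup", "projects")]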
diff --git a/src/doc/how-to-upgrade.md b/src/doc/how-to-upgrade.md
new file mode 100644
index 0000000000000000000000000000000000000000..56298e695bc4aa7ee83e407fffd39a5d0d8c21f5
--- /dev/null
+++ b/src/doc/how-to-upgrade.md
@@ -0,0 +1,16 @@
+
+# How to upgrade
+
+## 0.2.x to 0.3.0
+DictElementConverter (old: DictConverter) can now use "match" keywords. If
+none are in the definition, the behavior is as before. If you had "match",
+"match_name" or "match_value" in the definition of a
+DictConverter (StructureElement: Dict) before,
+you probably want to remove those. They were ignored before but are now used.
+
+TextElement used the 'match' keyword before, which was applied to the
+value. In the future it will be applied to the key instead, and it is
+forbidden to use for now. If you used the 'match'
+keyword in the definition of a TextElementConverter
+(StructureElement: TextElement) before, you need to change the key from "match"
+to "match_value" in order to preserve the behavior.
diff --git a/src/doc/index.rst b/src/doc/index.rst
index 724bcc543dd1cf0b9af451c487b1b3aab7fa95ca..b4e30e4728068cabb92626cfac986ab858a0bbb6 100644
--- a/src/doc/index.rst
+++ b/src/doc/index.rst
@@ -1,5 +1,5 @@
-Crawler 2.0 Documentation
-=========================
+CaosDB-Crawler Documentation
+============================
 
 
 .. toctree::
@@ -13,22 +13,23 @@ Crawler 2.0 Documentation
    CFoods (Crawler Definitions)<cfood>
    Macros<macros>
    Tutorials<tutorials/index>
+   How to upgrade<how-to-upgrade>
    API documentation<_apidoc/modules>
 
       
 
-This is the documentation for the crawler (previously known as crawler 2.0) for CaosDB, ``caosdb-crawler``.
+This is the documentation for CaosDB-Crawler (previously known as crawler 2.0),
+the main tool for automatic data insertion into CaosDB.
 
-The crawler is the main date integration tool for CaosDB.
 Its task is to automatically synchronize data found on file systems or in other
 sources of data with the semantic data model of CaosDB.
 
 More specifically, data that is contained in a hierarchical structure is converted to a data
 structure that is consistent with a predefined semantic data model.
 
-The hierarchical sturcture can be for example a file tree. However it can be
-also something different like the contents of a json file or a file tree with
-json files.
+The hierarchical structure can be, for example, a file tree. However, it can
+also be something different, like the contents of a JSON file or a file tree
+with JSON files.
 
 This documentation helps you to :doc:`get started<README_SETUP>`, explains the most important
 :doc:`concepts<concepts>` and offers a range of :doc:`tutorials<tutorials/index>`.
@@ -40,5 +41,3 @@ Indices and tables
 * :ref:`genindex`
 * :ref:`modindex`
 * :ref:`search`
-
-
diff --git a/src/doc/macros.rst b/src/doc/macros.rst
index 3d995c1fbc67b155a6df606ac2f84a0cec26d1a5..d3a3e9b9634a4e1d72228dd46692a824e1d5acfd 100644
--- a/src/doc/macros.rst
+++ b/src/doc/macros.rst
@@ -37,7 +37,7 @@ The same version using cfood macros could be defined as follows:
         name: null
         filename: null
       definition:
-        ${name}_filename
+        ${name}_filename:
           type: SimpleFile
           match: $filename
           records:
@@ -56,13 +56,43 @@ The same version using cfood macros could be defined as follows:
       - name: README
         filename: ^README.md$
 
+The "MarkdownFile" key and its value will be replaced by everything that is 
+given below "definition" in the Macro.
+        
+The expanded version of `ExperimentalData` will look like:
 
+.. _example_files_2_expanded:
+.. code-block:: yaml
+
+  ExperimentalData:
+    match: ExperimentalData
+    subtree:
+      README_filename:
+        match: ^README.md$
+        records:
+          README:
+            file: README_filename
+            parents:
+            - MarkdownFile
+            path: README_filename
+            role: File
+        type: SimpleFile
+    type: Directory
 
+This :ref:`example<example_files_2>` can also be found in the macro unit tests (see :func:`unittests.test_macros.test_documentation_example_2`).
 
 
 Complex Example
 ===============
 
+The following, more complex example demonstrates the use
+of macro variable substitutions that generate crawler variable substitutions:
+
+- `$$$nodename` will lead to a macro variable substitution of the variable `$nodename` during macro expansion.
+- `$$` will be turned into `$`.
+- So in the crawler cfood, the string will appear as `$value` if the variable `nodename` is set to `value` when using the macro.
+
+
 .. _example_1:
 .. code-block:: yaml
    
@@ -86,3 +116,118 @@ Complex Example
                file: $$$nodename
              Simulation:
                $recordtype: +$File
+
+The expanded version of :ref:`example<example_1>` can be seen in :ref:`example<example_1_expanded>`.
+
+
+.. _example_1_expanded:
+.. code-block:: yaml
+                
+  SimulationData:
+    match: SimulationData
+    subtree:
+      Dataset:
+        match: .*
+        records:
+          File:
+            file: $Dataset
+            parents:
+            - DatasetFile
+            path: $Dataset
+            role: File
+          Simulation:
+            DatasetFile: +$File
+        type: SimpleFile
+    type: Directory
+
+This :ref:`example<example_1>` can also be found in the macro unit tests (see :func:`unittests.test_macros.test_documentation_example_1`).
+
+
+
+Using Macros Multiple Times
+===========================
+
+To use the same macro multiple times in the same yaml node, lists can be used:
+
+.. _example_multiple:
+.. code-block:: yaml
+
+    ---
+    metadata:
+      macros:
+        - !defmacro
+          name: test_twice
+          params:
+            macro_name: default_name
+            a: 4
+          definition:
+            $macro_name:
+              something:
+                a: $a
+    ---
+    extroot: !macro
+      test_twice:
+      - macro_name: once  # <- This is the first replacement of the macro
+      - macro_name: twice # <- This is the second one, with different arguments
+        a: 5
+      - {}                # <- This is the third one, just using default arguments
+
+This :ref:`example<example_multiple>` is taken from the macro unit tests (see :func:`unittests.test_macros.test_use_macro_twice`).
+
+The example will be expanded to:
+
+.. _example_multiple_expanded:
+.. code-block:: yaml
+
+    extroot:
+      default_name:
+        something:
+          a: '4'
+      once:
+        something:
+          a: '4'
+      twice:
+        something:
+          a: '5'
+
+
+
+
+Limitation
+----------
+
+Currently it is not possible to use the same macro twice in the same yaml node
+at different positions. Consider:
+
+.. _example_multiple_limitation:
+.. code-block:: yaml
+
+    ---
+    metadata:
+      macros:
+        - !defmacro
+          name: test_twice
+          params:
+            macro_name: default_name
+            a: 4
+          definition:
+            $macro_name:
+              something:
+                a: $a
+    ---
+    extroot: !macro
+      test_twice:
+      - macro_name: once  # <- This is the first replacement of the macro
+        
+      Other_node:
+        type: test
+        
+      test_twice:  # This is NOT possible as each
+                   #  dictionary element can only appear once in a yaml node.
+      - macro_name: twice # <- This is the second one, with different arguments
+        a: 5
+      - {}                # <- This is the third one, just using default arguments
+
+However, this should not be a real limitation, as the crawler is designed in such a way
+that the order of the nodes on the same level does not matter.
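+
+In practice you can therefore gather all uses of the macro under a single key and place
+the other nodes next to it; a sketch based on the example above (assuming, as in that
+example, that non-macro nodes may share the tagged node):
+
+.. code-block:: yaml
+
+    extroot: !macro
+      test_twice:
+      - macro_name: once
+      - macro_name: twice
+        a: 5
+      Other_node:
+        type: test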
diff --git a/synchronize.md b/synchronize.md
index 7d240095a770a33d55370d9e48a1ab89b60c02dd..b178e647866d1e01c85ccfc8bff3383d5f93d21d 100644
--- a/synchronize.md
+++ b/synchronize.md
@@ -28,7 +28,6 @@ b) identifiable does not exist: check the to_be_inserted dict(key is a hash comp
 
 Maybe keep another dict that tracks what Record objects are in the to_be_updated dict (id(rec) as key?)
 
-After treating leave Records, Records that could not be checked before can be checked: Either referenced Records now have an ID or they are in the to_be_inserted dict such that it is clear that the identifiable at hand does not exist in the server.
+After treating leaf Records, Records that could not be checked before can be checked: Either referenced Records now have an ID or they are in the to_be_inserted dict such that it is clear that the identifiable at hand does not exist in the server.
 
 This way, the whole structure can be resolved except if there are circular dependencies: Those can be added fully to the to_be_inserted dict. (???)
-
diff --git a/unittests/broken_cfoods/broken1.yml b/unittests/broken_cfoods/broken1.yml
index 9fd4c52934c56512ada8ea564ccd540e07e25661..86202acd7a3be90b6a8b8e85aee5109d79799239 100644
--- a/unittests/broken_cfoods/broken1.yml
+++ b/unittests/broken_cfoods/broken1.yml
@@ -39,14 +39,14 @@ DataAnalysis:  # name of the converter
               # how to make match case insensitive?
               subtree:
                 description:
-                  type: DictTextElement
+                  type: TextElement
                   match_value: (?P<description>.*)
                   match_name: description
                   records:
                     Measurement:
                       description: $description
                 responsible_single:
-                    type: DictTextElement
+                    type: TextElement
                     match_name: responsible
                     match_value: &person_regexp ((?P<first_name>.+) )?(?P<last_name>.+)
                     records: &responsible_records
@@ -65,7 +65,7 @@ DataAnalysis:  # name of the converter
                   subtree:
                     Person:
                       type: TextElement
-                      match: *person_regexp
+                      match_name: *person_regexp
                       records: *responsible_records
 
 ExperimentalData:  # name of the converter
diff --git a/unittests/scifolder_cfood.yml b/unittests/scifolder_cfood.yml
index 90f193444bfda7296c46260236274da2378635cc..74fd027563907c5ae416ca389faba0ecd64d5848 100644
--- a/unittests/scifolder_cfood.yml
+++ b/unittests/scifolder_cfood.yml
@@ -42,14 +42,14 @@ Data:  # name of the converter
                   # how to make match case insensitive?
                   subtree:
                     description:
-                      type: DictTextElement
+                      type: TextElement
                       match_value: (?P<description>.*)
                       match_name: description
                       records:
                         Measurement:
                           description: $description
                     responsible_single:
-                        type: DictTextElement
+                        type: TextElement
                         match_name: responsible
                         match_value: &person_regexp ((?P<first_name>.+) )?(?P<last_name>.+)
                         records: &responsible_records
@@ -68,7 +68,7 @@ Data:  # name of the converter
                       subtree:
                         Person:
                           type: TextElement
-                          match: *person_regexp
+                          match_value: *person_regexp
                           records: *responsible_records
 
     ExperimentalData:  # name of the converter
diff --git a/unittests/scifolder_extended.yml b/unittests/scifolder_extended.yml
index 9bab612b9b37e8e295ee8fd02575de506a98d8fc..26f510679ff723ce5d9c0e705609e39bce60cbde 100644
--- a/unittests/scifolder_extended.yml
+++ b/unittests/scifolder_extended.yml
@@ -55,14 +55,14 @@ Data:  # name of the converter
                           
                   subtree:
                     description:
-                      type: DictTextElement
+                      type: TextElement
                       match_value: (?P<description>.*)
                       match_name: description
                       records:
                         Measurement:
                           description: $description
                     responsible_single:
-                        type: DictTextElement
+                        type: TextElement
                         match_name: responsible
                         match_value: &person_regexp ((?P<first_name>.+) )?(?P<last_name>.+)
                         records: &responsible_records
@@ -76,12 +76,12 @@ Data:  # name of the converter
                                                     # "responsible" belonging to Measurement.
 
                     responsible_list:
-                      type: DictListElement
+                      type: ListElement
                       match_name: responsible
                       subtree:
                         Person:
                           type: TextElement
-                          match: *person_regexp
+                          match_value: *person_regexp
                           records: *responsible_records
 
                     # sources_list:
diff --git a/unittests/scifolder_extended2.yml b/unittests/scifolder_extended2.yml
index 969325e91da488011819c338708a33dcfc32c93e..a189e79c12c2e1393188c8b9f532162518244508 100644
--- a/unittests/scifolder_extended2.yml
+++ b/unittests/scifolder_extended2.yml
@@ -56,14 +56,14 @@ Data:  # name of the converter
                           
                   subtree:
                     description:
-                      type: DictTextElement
+                      type: TextElement
                       match_value: (?P<description>.*)
                       match_name: description
                       records:
                         Measurement:
                           description: $description
                     responsible_single:
-                        type: DictTextElement
+                        type: TextElement
                         match_name: responsible
                         match_value: &person_regexp ((?P<first_name>.+) )?(?P<last_name>.+)
                         records: &responsible_records
@@ -77,12 +77,12 @@ Data:  # name of the converter
                                                     # "responsible" belonging to Measurement.
 
                     responsible_list:
-                      type: DictListElement
+                      type: ListElement
                       match_name: responsible
                       subtree:
                         Person:
                           type: TextElement
-                          match: *person_regexp
+                          match_value: *person_regexp
                           records: *responsible_records
 
                     # sources_list:
diff --git a/unittests/test_cache.py b/unittests/test_cache.py
deleted file mode 100644
index 135316b92fda0ac1e43f4e5f2c4f28fbf1272494..0000000000000000000000000000000000000000
--- a/unittests/test_cache.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/bin/python
-# Tests for entity comparison
-# A. Schlemmer, 06/2021
-
-import caosdb as db
-from pytest import raises
-
-from caoscrawler.identified_cache import _create_hashable_string as create_hash_string
-
-
-def test_normal_hash_creation():
-    # Test the initial functionality:
-    # hash comprises only one parent, name and properties:
-
-    r1 = db.Record()
-    r1.add_property(name="test")
-    r1.add_parent("bla")
-    hash1 = create_hash_string(r1)
-
-    r2 = db.Record()
-    r2.add_property(name="test2")
-    r2.add_parent("bla")
-    hash2 = create_hash_string(r2)
-
-    assert hash1 != hash2
-
-    r3 = db.Record()
-    r3.add_property(name="test")
-    r3.add_parent("bla bla")
-    hash3 = create_hash_string(r3)
-    assert hash1 != hash3
-    assert hash2 != hash3
-
-    # no name and no properties and no parents:
-    r4 = db.Record()
-    with raises(RuntimeError, match=".*1 parent.*"):
-        create_hash_string(r4)
-
-    # should work
-    r4.add_parent("bla")
-    assert len(create_hash_string(r4)) > 0
-    r4.add_property(name="test")
-    assert len(create_hash_string(r4)) > 0
-
-    r4.add_parent("bla bla")
-    with raises(RuntimeError, match=".*1 parent.*"):
-        create_hash_string(r4)
-
-
-def test_file_hash_creation():
-    f1 = db.File(path="/bla/bla/test1.txt")
-    hash1 = create_hash_string(f1)
-    f2 = db.File(path="/bla/bla/test2.txt")
-    hash2 = create_hash_string(f2)
-
-    assert hash1 != hash2
diff --git a/unittests/test_converters.py b/unittests/test_converters.py
index 30c5972c4f006aaf9923dfc058c3b861d8b5123b..7ddf0c9e07049f5f12cafeae4f3ef25ddab28c56 100644
--- a/unittests/test_converters.py
+++ b/unittests/test_converters.py
@@ -28,17 +28,17 @@ import os
 import pytest
 import yaml
 
-from caoscrawler.converters import (Converter, ConverterValidationError,
-                                    DictConverter, DirectoryConverter,
+from caoscrawler.converters import (Converter, ConverterValidationError, DictElementConverter,
+                                    DirectoryConverter, DictIntegerElementConverter,
                                     handle_value, MarkdownFileConverter,
+                                    FloatElementConverter, IntegerElementConverter,
                                     JSONFileConverter)
+from caoscrawler.converters import _AbstractScalarValueElementConverter
 from caoscrawler.crawl import Crawler
 from caoscrawler.stores import GeneralStore
-from caoscrawler.structure_elements import (File, DictTextElement,
-                                            DictListElement, DictElement,
-                                            DictBooleanElement, DictDictElement,
-                                            DictIntegerElement,
-                                            DictFloatElement, Directory)
+from caoscrawler.structure_elements import (File, TextElement, ListElement, DictElement,
+                                            BooleanElement, IntegerElement,
+                                            FloatElement, Directory)
 
 from test_tool import rfp
 
@@ -52,14 +52,14 @@ def converter_registry():
         "MarkdownFile": {
             "converter": "MarkdownFileConverter",
             "package": "caoscrawler.converters"},
-        "Dict": {
-            "converter": "DictConverter",
+        "DictElement": {
+            "converter": "DictElementConverter",
             "package": "caoscrawler.converters"},
-        "DictTextElement": {
-            "converter": "DictTextElementConverter",
+        "TextElement": {
+            "converter": "TextElementConverter",
             "package": "caoscrawler.converters"},
-        "DictListElement": {
-            "converter": "DictListElementConverter",
+        "ListElement": {
+            "converter": "ListElementConverter",
             "package": "caoscrawler.converters"},
         "TextElement": {
             "converter": "TextElementConverter",
@@ -80,8 +80,8 @@ def testConverterTrivial(converter_registry):
     types = [
         "Directory",
         "MarkdownFile",
-        "DictTextElement",
-        "DictListElement",
+        "TextElement",
+        "ListElement",
         "TextElement"
     ]
 
@@ -149,11 +149,11 @@ def test_markdown_converter(converter_registry):
 
     children = converter.create_children(None, test_readme)
     assert len(children) == 5
-    assert children[1].__class__ == DictTextElement
+    assert children[1].__class__ == TextElement
     assert children[1].name == "description"
     assert children[1].value.__class__ == str
 
-    assert children[0].__class__ == DictTextElement
+    assert children[0].__class__ == TextElement
     assert children[0].name == "responsible"
     assert children[0].value.__class__ == str
 
@@ -169,11 +169,11 @@ def test_markdown_converter(converter_registry):
 
     children = converter.create_children(None, test_readme2)
     assert len(children) == 2
-    assert children[1].__class__ == DictTextElement
+    assert children[1].__class__ == TextElement
     assert children[1].name == "description"
     assert children[1].value.__class__ == str
 
-    assert children[0].__class__ == DictListElement
+    assert children[0].__class__ == ListElement
     assert children[0].name == "responsible"
     assert children[0].value.__class__ == list
 
@@ -195,39 +195,39 @@ def test_json_converter(converter_registry):
 
     children = jsonconverter.create_children(None, test_json)
     assert len(children) == 8
-    assert children[0].__class__ == DictTextElement
+    assert children[0].__class__ == TextElement
     assert children[0].name == "name"
     assert children[0].value.__class__ == str
     assert children[0].value == "DEMO"
 
-    assert children[1].__class__ == DictIntegerElement
+    assert children[1].__class__ == IntegerElement
     assert children[1].name == "projectId"
     assert children[1].value.__class__ == int
     assert children[1].value == 10002
 
-    assert children[2].__class__ == DictBooleanElement
+    assert children[2].__class__ == BooleanElement
     assert children[2].name == "archived"
     assert children[2].value.__class__ == bool
 
-    assert children[3].__class__ == DictListElement
+    assert children[3].__class__ == ListElement
     assert children[3].name == "Person"
     assert children[3].value.__class__ == list
     assert len(children[3].value) == 2
 
-    assert children[4].__class__ == DictTextElement
+    assert children[4].__class__ == TextElement
     assert children[4].name == "start_date"
     assert children[4].value.__class__ == str
 
-    assert children[5].__class__ == DictListElement
+    assert children[5].__class__ == ListElement
     assert children[5].name == "candidates"
     assert children[5].value.__class__ == list
     assert children[5].value == ["Mouse", "Penguine"]
 
-    assert children[6].__class__ == DictFloatElement
+    assert children[6].__class__ == FloatElement
     assert children[6].name == "rvalue"
     assert children[6].value.__class__ == float
 
-    assert children[7].__class__ == DictTextElement
+    assert children[7].__class__ == TextElement
     assert children[7].name == "url"
     assert children[7].value.__class__ == str
 
@@ -385,3 +385,62 @@ MyElement:
         list(yaml.safe_load_all(two_doc_yaml)))
     assert "MyElement" in two_doc_definitions
     assert two_doc_definitions["MyElement"]["type"] == one_doc_definitions["MyElement"]["type"]
+
+
+def test_abstract_dict_element_converter():
+    definition = yaml.safe_load("""
+match_name: text
+match_value: .*begin(?P<text>.*)end
+accept_text: True
+    """)
+    converter = _AbstractScalarValueElementConverter(
+        definition, "test_converter",
+        None  # This is possible when "subtree" is not used
+    )
+    element = TextElement("text", """
+begin
+bla
+end""")
+    val = converter.match(element)
+    assert val is not None
+    assert val["text"] == "\nbla\n"
+
+
+def test_converter_value_match(converter_registry):
+    # test with defaults
+    dc = FloatElementConverter(
+        definition={
+            "match_name": "(.*)",
+            "match_value": "(.*)",
+        },
+        name="Test",
+        converter_registry=converter_registry
+    )
+    m = dc.match(IntegerElement(name="a", value=4))
+    assert m is not None
+
+    # overwrite default with no match for int
+    dc = FloatElementConverter(
+        definition={
+            "match_name": "(.*)",
+            "match_value": "(.*)",
+            "accept_int": False,
+        },
+        name="Test",
+        converter_registry=converter_registry
+    )
+    with pytest.raises(RuntimeError) as err:
+        m = dc.match(IntegerElement(name="a", value=4))
+
+    # overwrite default with match for float
+    dc = IntegerElementConverter(
+        definition={
+            "match_name": "(.*)",
+            "match_value": "(.*)",
+            "accept_float": True,
+        },
+        name="Test",
+        converter_registry=converter_registry
+    )
+    m = dc.match(FloatElement(name="a", value=4.0))
+    assert m is not None
diff --git a/unittests/test_directories/example_substitutions/ExperimentalData/file.data b/unittests/test_directories/example_substitutions/ExperimentalData/file.data
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/unittests/test_directories/example_substitutions/substitutions_parents.yml b/unittests/test_directories/example_substitutions/substitutions_parents.yml
index 107e766ccd833fab618cecfc04f13bc29abc80a6..2e208b522cc79569a7679a9df4ba7782ad1fe1c3 100644
--- a/unittests/test_directories/example_substitutions/substitutions_parents.yml
+++ b/unittests/test_directories/example_substitutions/substitutions_parents.yml
@@ -5,6 +5,10 @@ ExperimentalData:  # name of the converter
   records:
     Project:
       name: project
+    RecordWithoutParents:
+      parents: []
+    RecordThatGetsParentsLater:
+      parents: []
   subtree:
     File:  # name of the converter
       type: SimpleFile
@@ -22,4 +26,13 @@ ExperimentalData:  # name of the converter
         Project:
           Experiments: +$Experiment
           dates: +20$year-$month-$day
-      
+
+        RecordThatGetsParentsLater:
+          parents:
+          - Month_$month  # This adds a special parent as record type
+    OtherFile:
+      type: SimpleFile
+      match: ".*\\.data"
+      records:
+        RecordThatGetsParentsLater:
+          someId: "23"
diff --git a/unittests/test_directories/examples_article/SimulationData/2020_climate-model-predict/2020-02-01/README.md b/unittests/test_directories/examples_article/SimulationData/2020_climate-model-predict/2020-02-01/README.md
index 0e7726d941d86ca357149f28ef98311b33808f11..0c91d6b5f7601334b84a77328b888d227e779a93 100644
--- a/unittests/test_directories/examples_article/SimulationData/2020_climate-model-predict/2020-02-01/README.md
+++ b/unittests/test_directories/examples_article/SimulationData/2020_climate-model-predict/2020-02-01/README.md
@@ -1,6 +1,6 @@
 ---
 responsible: AuthorE
-description: >
+description: >-
   Code for fitting the predictive model to the
   training data and for predicting the average
   annual temperature for all measurement stations
diff --git a/unittests/test_directories/examples_json/jsontest_cfood.yml b/unittests/test_directories/examples_json/jsontest_cfood.yml
index f1eb6a9fa186c07f551bd12a84050f544abfdabc..875773e6bf523500dba46abffda25c0edcb3abc4 100644
--- a/unittests/test_directories/examples_json/jsontest_cfood.yml
+++ b/unittests/test_directories/examples_json/jsontest_cfood.yml
@@ -9,25 +9,25 @@ JSONTest:  # name of the converter
         - Project  # not needed as the name is equivalent
   subtree:
     name_element:
-      type: DictTextElement
+      type: TextElement
       match_name: "name"
       match_value: "(?P<name>.*)"
       records:
         Project:
           name: $name
     url_element:  # name of the first subtree element which is a converter
-      type: DictTextElement
+      type: TextElement
       match_value: "(?P<url>.*)"
       match_name: "url"
       records:
         Project:
           url: $url
     persons_element:
-      type: DictListElement
+      type: ListElement
       match_name: "Person"
       subtree:
         person_element:
-          type: Dict
+          type: DictElement
           records:
             Person:
               parents:
@@ -36,21 +36,21 @@ JSONTest:  # name of the converter
               Person: +$Person
           subtree:
             firstname_element:
-              type: DictTextElement
+              type: TextElement
               match_name: "firstname"
               match_value: "(?P<firstname>.*)"
               records:
                 Person:
                   firstname: $firstname
             lastname_element:
-              type: DictTextElement
+              type: TextElement
               match_name: "lastname"
               match_value: "(?P<lastname>.*)"
               records:
                 Person:
                   lastname: $lastname
             email_element:
-              type: DictTextElement
+              type: TextElement
               match_name: "email"
               match_value: "(?P<email>.*)"
               records:
diff --git a/unittests/test_directories/single_file_test_data/identifiables.yml b/unittests/test_directories/single_file_test_data/identifiables.yml
index e32746d5a6984096cc46fa618250832b325965b0..c6f82be3dbf11db3f69e06d9a6fd2ee692901212 100644
--- a/unittests/test_directories/single_file_test_data/identifiables.yml
+++ b/unittests/test_directories/single_file_test_data/identifiables.yml
@@ -5,3 +5,7 @@ Keyword:
 Project:
   - project_id
   - title
+Unknown:
+  - propa
+  - is_referenced_by: [Some]
+
diff --git a/unittests/test_file_identifiables.py b/unittests/test_file_identifiables.py
index b0b9801993dc68fe473e788b8ca79a2244912676..aff174d0228d2750efd1cca129547c821c974127 100644
--- a/unittests/test_file_identifiables.py
+++ b/unittests/test_file_identifiables.py
@@ -8,63 +8,44 @@ import pytest
 from pytest import raises
 
 from caoscrawler.identifiable_adapters import LocalStorageIdentifiableAdapter
+from caoscrawler.identifiable import Identifiable
 
 
 def test_file_identifiable():
     ident = LocalStorageIdentifiableAdapter()
-    file_obj = db.File()
 
+    # Without a path there is no identifying information
+    with raises(ValueError):
+        ident.get_identifiable(db.File(), [])
+
+    fp = "/test/bla/bla.txt"
+    file_obj = db.File(path=fp)
     identifiable = ident.get_identifiable(file_obj)
-    identifiable2 = ident.get_identifiable_for_file(file_obj)
 
-    # these are two different objects:
-    assert identifiable != identifiable2
-    assert file_obj != identifiable
-    # ... but the path is equal:
-    assert identifiable.path == identifiable2.path
-    # ... and very boring:
-    assert identifiable.path is None
-    # Test functionality of retrieving the files:
-    identified_file = ident.get_file(identifiable)
-    identified_file2 = ident.get_file(file_obj)
-    # The both should be None currently as there are no files in the local store yet:
-    assert identified_file is None
-    assert identified_file2 is None
+    # the path is copied to the identifiable
+    assert fp == identifiable.path
+    assert isinstance(identifiable, Identifiable)
 
-    # Let's make it more interesting:
-    file_obj.path = "/test/bla/bla.txt"
-    file_obj._checksum = "abcd"
-    identifiable = ident.get_identifiable(file_obj)
-    assert file_obj != identifiable
-    assert file_obj.path == identifiable.path
-    # Checksum is not part of the identifiable:
-    assert file_obj.checksum != identifiable.checksum
+    # __eq__ function is only defined for Identifiable objects
+    with raises(ValueError):
+        file_obj != identifiable
 
-    # This is the wrong method, so it should definitely return None:
-    identified_file = ident.retrieve_identified_record_for_identifiable(
-        identifiable)
-    assert identified_file is None
-    # This is the correct method to use:
-    identified_file = ident.get_file(identifiable)
-    # or directly using:
-    identified_file2 = ident.get_file(file_obj)
-    # The both should be None currently as there are no files in the local store yet:
-    assert identified_file is None
-    assert identified_file2 is None
+    # since the path does not exist in the data in ident, the following functions return None
+    assert ident.retrieve_identified_record_for_record(file_obj) is None
+    assert ident.get_file(identifiable) is None
 
     # Try again with actual files in the store:
     records = ident.get_records()
-    test_record_wrong_path = db.File(
-        path="/bla/bla/test.txt")
-    test_record_correct_path = db.File(
-        path="/test/bla/bla.txt")
-    test_record_alsocorrect_path = db.File(
-        path="/test/bla/bla.txt")
+    test_record_wrong_path = db.File(path="/bla/bla/test.txt")
+    test_record_correct_path = db.File(path="/test/bla/bla.txt")
+    test_record_alsocorrect_path = db.File(path="/test/bla/bla.txt")
     records.append(test_record_wrong_path)
+    # Now, there is a file, but still wrong path -> result is still None
     identified_file = ident.get_file(file_obj)
     assert identified_file is None
 
     records.append(test_record_correct_path)
+    # now there is a match
     identified_file = ident.get_file(file_obj)
     assert identified_file is not None
     assert identified_file.path == file_obj.path
diff --git a/unittests/test_identifiable.py b/unittests/test_identifiable.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f3c606b163df4dc238be9a669fd31eb630a582d
--- /dev/null
+++ b/unittests/test_identifiable.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python3
+# encoding: utf-8
+#
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2021 Indiscale GmbH <info@indiscale.com>
+# Copyright (C) 2021 Henrik tom Wörden <h.tomwoerden@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+
+"""
+test identifiable module
+"""
+
+import pytest
+import caosdb as db
+from caoscrawler.identifiable import Identifiable
+from caoscrawler.identified_cache import IdentifiedCache
+
+
+def test_create_hashable_string():
+    assert Identifiable._create_hashable_string(
+        Identifiable(name="A", record_type="B")) == "P<B>N<A>R<[]>"
+    assert Identifiable._create_hashable_string(
+        Identifiable(name="A", record_type="B", properties={'a': 5})) == "P<B>N<A>R<[]>a:5"
+    a = Identifiable._create_hashable_string(
+        Identifiable(name="A", record_type="B", properties={'a': 4, 'b': 5}))
+    b = Identifiable._create_hashable_string(
+        Identifiable(name="A", record_type="B", properties={'b': 5, 'a': 4}))
+    assert a == b
+    assert (
+        Identifiable._create_hashable_string(
+            Identifiable(name="A", record_type="B",
+                         properties={'a': db.Record(id=12)})
+        ) == "P<B>N<A>R<[]>a:12")
+    a = Identifiable._create_hashable_string(
+        Identifiable(name="A", record_type="B", properties={'a': [db.Record(id=12)]}))
+    assert (a == "P<B>N<A>R<[]>a:[12]")
+    assert (Identifiable._create_hashable_string(
+        Identifiable(name="A", record_type="B", properties={'a': [12]})) == "P<B>N<A>R<[]>a:[12]")
+    assert (
+        Identifiable._create_hashable_string(
+            Identifiable(name="A", record_type="B", properties={
+                         'a': [db.Record(id=12), 11]})
+        ) == "P<B>N<A>R<[]>a:[12, 11]")
+    assert (
+        Identifiable._create_hashable_string(
+            Identifiable(record_type="B", properties={'a': [db.Record()]})
+        ) != Identifiable._create_hashable_string(
+            Identifiable(record_type="B", properties={'a': [db.Record()]})))
+    assert Identifiable._create_hashable_string(
+        Identifiable(name="A", record_type="B", backrefs=[123, db.Entity(id=124)],
+                     properties={'a': 5})) == "P<B>N<A>R<['123', '124']>a:5"
+
+
+def test_name():
+    with pytest.raises(ValueError):
+        Identifiable(properties={"Name": 'li'})
+
+
+def test_repr():
+    # only test that something meaningful is returned
+    assert 'properties' in str(Identifiable(name="A", record_type="B"))
+    assert str(Identifiable(name="A", record_type="B", properties={'a': 0})).split(
+        "properties:\n")[1].split('\n')[0] == '{"a": 0}'
+    assert str(Identifiable(name="A", record_type="B", properties={'a': 0, 'b': "test"})).split(
+        "properties:\n")[1].split('\n')[0] == '{"a": 0, "b": "test"}'
+
+    # TODO(henrik): Add a test using backrefs once that's implemented.
+
+
+def test_equality():
+    assert Identifiable(
+        record_id=12, properties={"a": 0}) == Identifiable(record_id=12, properties={"a": 1})
+    assert Identifiable(
+        record_id=12, properties={"a": 0}) != Identifiable(record_id=13, properties={"a": 0})
+    assert Identifiable(
+        record_id=12, properties={"a": 0}) == Identifiable(properties={"a": 0})
+    assert Identifiable(
+        path="a", properties={"a": 0}) != Identifiable(path="b", properties={"a": 0})
+    assert Identifiable(
+        path="a", properties={"a": 0}) == Identifiable(path="a", properties={"a": 1})
+    assert Identifiable(
+        path="a", properties={"a": 0}) == Identifiable(properties={"a": 0})
+    assert Identifiable(properties={"a": 0}) == Identifiable(
+        properties={"a": 0})
+    assert Identifiable(properties={"a": 0}) != Identifiable(
+        properties={"a": 1})
diff --git a/unittests/test_identifiable_adapters.py b/unittests/test_identifiable_adapters.py
index ef7998a460c07342d30a3f769fd609c1045a9cca..6817b9e6993c0ec509354b68ff60d9a9caf534ae 100644
--- a/unittests/test_identifiable_adapters.py
+++ b/unittests/test_identifiable_adapters.py
@@ -31,35 +31,46 @@ import os
 from datetime import datetime
 from caoscrawler.identifiable_adapters import (
     CaosDBIdentifiableAdapter, IdentifiableAdapter)
+from caoscrawler.identifiable import Identifiable
 import caosdb as db
 
 
 def test_create_query_for_identifiable():
     query = IdentifiableAdapter.create_query_for_identifiable(
-        db.Record().add_parent("Person")
-        .add_property("first_name", value="A")
-        .add_property("last_name", value="B"))
+        Identifiable(record_type="Person", properties={"first_name": "A", "last_name": "B"}))
     assert query.lower() == "find record person with 'first_name'='a' and 'last_name'='b' "
 
     query = IdentifiableAdapter.create_query_for_identifiable(
-        db.Record(name="A").add_parent("B")
-        .add_property("c", value="c")
-        .add_property("d", value=5)
-        .add_property("e", value=5.5)
-        .add_property("f", value=datetime(2020, 10, 10))
-        .add_property("g", value=True)
-        .add_property("h", value=db.Record(id=1111))
-        .add_property("i", value=db.File(id=1112))
-        .add_property("j", value=[2222, db.Record(id=3333)]))
+        Identifiable(name="A", record_type="B", properties={
+            "c": "c",
+            "d": 5,
+            "e": 5.5,
+            "f": datetime(2020, 10, 10),
+            "g": True,
+            "h": db.Record(id=1111),
+            "i": db.File(id=1112),
+            "j": [2222, db.Record(id=3333)]}))
     assert (query.lower() == "find record b with name='a' and 'c'='c' and 'd'='5' and 'e'='5.5'"
             " and 'f'='2020-10-10t00:00:00' and 'g'='true' and 'h'='1111' and 'i'='1112' and "
             "'j'='2222' and 'j'='3333' ")
 
     # The name can be the only identifiable
     query = IdentifiableAdapter.create_query_for_identifiable(
-        db.Record(name="TestRecord").add_parent("TestType"))
+        Identifiable(name="TestRecord", record_type="TestType"))
     assert query.lower() == "find record testtype with name='testrecord'"
 
+    # With referencing entity (backref)
+    query = IdentifiableAdapter.create_query_for_identifiable(
+        Identifiable(record_type="Person", backrefs=[14433], properties={'last_name': "B"}))
+    assert query.lower() == ("find record person which is referenced by 14433 and with "
+                             "'last_name'='b' ")
+
+    # With two referencing entities (backref)
+    query = IdentifiableAdapter.create_query_for_identifiable(
+        Identifiable(record_type="Person", backrefs=[14433, 333], properties={'last_name': "B"}))
+    assert query.lower() == ("find record person which is referenced by 14433 and which is "
+                             "referenced by 333 and with 'last_name'='b' ")
+
 
 def test_load_from_yaml_file():
     ident = CaosDBIdentifiableAdapter()
diff --git a/unittests/test_identified_cache.py b/unittests/test_identified_cache.py
index 33add97d4309d87705144ec5331366d0bcd05541..4ed7c55c7326415308917e20e9f391b17b07ad87 100644
--- a/unittests/test_identified_cache.py
+++ b/unittests/test_identified_cache.py
@@ -27,40 +27,18 @@
 test identified_cache module
 """
 
-from caoscrawler.identified_cache import _create_hashable_string, IdentifiedCache
 import caosdb as db
-
-
-def test_create_hash():
-    assert _create_hashable_string(
-        db.Record("A").add_parent("B")) == "P<B>N<A>"
-    assert _create_hashable_string(db.Record("A")
-                                   .add_parent("B").add_property('a', 5)) == "P<B>N<A>a:5"
-    assert (_create_hashable_string(
-        db.Record("A").add_parent("B")
-        .add_property('a', 4).add_property('b', 5)) == _create_hashable_string(
-            db.Record("A").add_parent("B")
-            .add_property('b', 5).add_property('a', 4)))
-    assert (_create_hashable_string(db.Record("A")
-                                    .add_parent("B")
-                                    .add_property('a', db.Record(id=12))) == "P<B>N<A>a:12")
-    assert (_create_hashable_string(db.Record("A")
-                                    .add_parent("B")
-                                    .add_property('a', [db.Record(id=12)])) == "P<B>N<A>a:[12]")
-    assert (_create_hashable_string(db.Record("A")
-                                    .add_parent("B").add_property('a', [12])) == "P<B>N<A>a:[12]")
-    assert (_create_hashable_string(
-        db.Record("A")
-        .add_parent("B")
-        .add_property('a', [db.Record(id=12), 11])) == "P<B>N<A>a:[12, 11]")
+from caoscrawler.identifiable import Identifiable
+from caoscrawler.identified_cache import IdentifiedCache
 
 
 def test_IdentifiedCache():
-    ident = db.Record("A").add_parent("B")
+    ident = Identifiable(name="A", record_type="B")
     record = db.Record("A").add_parent("B").add_property('b', 5)
     cache = IdentifiedCache()
     assert ident not in cache
     cache.add(record=record, identifiable=ident)
     assert ident in cache
-    assert record not in cache
     assert cache[ident] is record
+    assert Identifiable(name="A", record_type="C") != Identifiable(name="A", record_type="B")
+    assert Identifiable(name="A", record_type="C") not in cache
diff --git a/unittests/test_issues.py b/unittests/test_issues.py
index 6e77b0c7f26f4b2970203cfc4b8cc786fe24121b..6b7b0d52ce5f4a1cfe5e4ac189d72eafd1454db7 100644
--- a/unittests/test_issues.py
+++ b/unittests/test_issues.py
@@ -22,20 +22,20 @@
 
 from pytest import mark
 
+import caosdb as db
+
 from caoscrawler.crawl import Crawler
-from caoscrawler.structure_elements import Dict
+from caoscrawler.identifiable import Identifiable
+from caoscrawler.identifiable_adapters import CaosDBIdentifiableAdapter
+from caoscrawler.structure_elements import DictElement
 from test_tool import rfp
 
 
-@mark.xfail(
-    reason="Wait until value conversion in dicts is fixed, see "
-    "https://gitlab.com/caosdb/caosdb-crawler/-/issues/10."
-)
 def test_issue_10():
     """Test integer-to-float conversion in dictionaries"""
     crawler_definition = {
         "DictTest": {
-            "type": "Dict",
+            "type": "DictElement",
             "match": "(.*)",
             "records": {
                 "TestRec": {}
@@ -63,8 +63,95 @@ def test_issue_10():
     }
 
     records = crawler.start_crawling(
-        Dict("TestDict", test_dict), crawler_definition, converter_registry)
+        DictElement("TestDict", test_dict), crawler_definition, converter_registry)
     assert len(records) == 1
     assert records[0].parents[0].name == "TestRec"
     assert records[0].get_property("float_prop") is not None
     assert float(records[0].get_property("float_prop").value) == 4.0
+
+
+def test_issue_39():
+    """Test for merge conflicts in
+    `crawl.Crawler.split_into_inserts_and_updates` (see
+    https://gitlab.com/caosdb/caosdb-crawler/-/issues/39).
+
+    """
+
+    crawler = Crawler(debug=True)
+
+    # For trying and failing to retrieve remotely identified records
+    def _fake_retrieve(*args, **kwargs):
+        return None
+
+    ident = CaosDBIdentifiableAdapter()
+    # identifiable property is just name for both Record Types
+    ident.register_identifiable("RT_A", db.RecordType().add_parent(
+        name="RT_A").add_property(name="name"))
+    ident.register_identifiable("RT_B", db.RecordType().add_parent(
+        name="RT_B").add_property(name="name"))
+    # overwrite retrieve
+    ident.retrieve_identified_record_for_identifiable = _fake_retrieve
+    crawler.identifiableadapter = ident
+
+    # a1 (has id) references b1 (has no id)
+    a1 = db.Record(name="A", id=101).add_parent(name="RT_A")
+    b1 = db.Record(name="B").add_parent(name="RT_B")
+    a1.add_property(name="RT_B", value=b1)
+
+    # a2 (no id) references b2 (has id)
+    a2 = db.Record(name="A").add_parent(name="RT_A")
+    b2 = db.Record(name="B", id=102).add_parent(name="RT_B")
+    a2.add_property(name="RT_B", value=b2)
+
+    flat_list = [b1, a1, a2, b2]
+
+    # the two records with ids exist remotely
+    crawler.add_to_remote_existing_cache(a1,
+                                         Identifiable(name="A", record_id=101, record_type="RT_A"))
+    crawler.add_to_remote_existing_cache(b2,
+                                         Identifiable(name="B", record_id=102, record_type="RT_B"))
+
+    # this would result in a merge conflict before
+    ins, ups = crawler.split_into_inserts_and_updates(flat_list)
+
+
+@mark.xfail(reason="FIX: https://gitlab.com/caosdb/caosdb-crawler/-/issues/47")
+def test_list_datatypes():
+    crawler_definition = {
+        "DictTest": {
+            "type": "DictElement",
+            "match": "(.*)",
+            "records": {
+                "Dataset": {}
+            },
+            "subtree": {
+                "int_element": {
+                    "type": "IntegerElement",
+                    "match_name": ".*",
+                    "match_value": "(?P<int_value>.*)",
+                    "records": {
+                        "Dataset": {
+                            "Subject": "+$int_value"
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    crawler = Crawler(debug=True)
+    converter_registry = crawler.load_converters(crawler_definition)
+
+    test_dict = {
+        "v1": 1233,
+        "v2": 1234
+    }
+
+    records = crawler.start_crawling(
+        DictElement("TestDict", test_dict), crawler_definition, converter_registry)
+    assert len(records) == 1
+    assert records[0].parents[0].name == "Dataset"
+    assert records[0].get_property("Subject") is not None
+    assert isinstance(records[0].get_property("Subject").value, list)
+    assert records[0].get_property("Subject").datatype is not None
+    assert records[0].get_property("Subject").datatype.startswith("LIST")
diff --git a/unittests/test_json.py b/unittests/test_json.py
index 97d9831de20a2b9f712294d1a0f6322789580f30..41fd31a43389148ad6fbc4167fd3fbd4f7f2ee9f 100644
--- a/unittests/test_json.py
+++ b/unittests/test_json.py
@@ -24,7 +24,7 @@
 #
 
 """
-module description
+test the JSON converter
 """
 import json
 import os
@@ -33,7 +33,7 @@ from pytest import raises
 
 import caosdb as db
 
-from caoscrawler.converters import JSONFileConverter, DictConverter
+from caoscrawler.converters import JSONFileConverter
 from caoscrawler.crawl import Crawler
 from caoscrawler.structure_elements import File, JSONFile
 from test_tool import rfp, dircheckstr
diff --git a/unittests/test_macros.py b/unittests/test_macros.py
index 7ac34cc7c48df3cb2855d7022119e4775d90c9a6..4e27e42f8d1e633cf97fa142e2c0ec8aa013af05 100644
--- a/unittests/test_macros.py
+++ b/unittests/test_macros.py
@@ -295,7 +295,6 @@ extroot3: !macro
     assert cfood["extroot3"]["test_four"] is None
 
 
-# @pytest.mark.xfail(reason="Fix multiple usage of the same macro.")
 def test_use_macro_twice():
     """Test that the same macro can be used twice with different parameters in
     the same CFood element if the name depends on the parameters.
@@ -328,3 +327,132 @@ extroot: !macro
     assert cfood["extroot"]["once"]["something"]["a"] == "4"
     assert cfood["extroot"]["twice"]["something"]["a"] == "5"
     assert cfood["extroot"]["default_name"]["something"]["a"] == "4"
+    # Code sample to generate the expanded macro:
+    # with open("expanded_test_macro.yaml", "w") as f:
+    #     f.write(yaml.dump(cfood))
+
+
+def test_documentation_example_2():
+
+    cfood = _temp_file_load("""
+---
+metadata:
+  macros:
+  - !defmacro
+    name: MarkdownFile
+    params:
+      name: null
+      filename: null
+    definition:
+      ${name}_filename:
+        type: SimpleFile
+        match: $filename
+        records:
+          $name:
+            parents:
+            - MarkdownFile
+            role: File
+            path: ${name}_filename
+            file: ${name}_filename
+---
+ExperimentalData:
+  type: Directory
+  match: ExperimentalData
+  subtree: !macro
+    MarkdownFile:
+    - name: README
+      filename: ^README.md$
+    """)
+
+    # Code sample to generate the expanded macro:
+    # with open("expanded_test_macro.yaml", "w") as f:
+    #     f.write(yaml.dump(cfood))
+
+
+def test_documentation_example_1():
+
+    cfood = _temp_file_load("""
+---
+metadata:
+  macros:
+  - !defmacro
+    name: SimulationDatasetFile
+    params:
+      match: null
+      recordtype: null
+      nodename: null
+    definition:
+      $nodename:
+        match: $match
+        type: SimpleFile
+        records:
+          File:
+            parents:
+            - $recordtype
+            role: File
+            path: $$$nodename
+            file: $$$nodename
+          Simulation:
+            $recordtype: +$File
+---
+SimulationData:
+  type: Directory
+  match: SimulationData
+  subtree: !macro
+    SimulationDatasetFile:
+    - match: .*
+      recordtype: DatasetFile
+      nodename: Dataset
+    """)
+
+    # Code sample to generate the expanded macro:
+    # with open("expanded_test_macro.yaml", "w") as f:
+    #     f.write(yaml.dump(cfood))
+
+
+@pytest.mark.xfail(
+    reason="Wait until this feature is implemented"
+    "https://gitlab.com/caosdb/caosdb-crawler/-/issues/21."
+)
+def test_def_replacements():
+    """Test that parameters in macro definitions can be used
+    for defining subsequent parameters.
+    """
+
+    cfood = _temp_file_load("""
+---
+metadata:
+  macros:
+    - !defmacro
+      name: test_def_replacements
+      params:
+        macro_name: default_name
+        z: $macro_name
+        a: $macro_name
+        v: $z
+      definition:
+        $macro_name:
+          macro_name: $macro_name
+          z: $z
+          a: $a
+          v: $v
+---
+extroot: !macro
+  test_def_replacements:
+  - macro_name: once
+  - macro_name: twice
+    z: 5
+  - {}
+    """)
+    assert cfood["extroot"]["once"]["z"] == "once"
+    assert cfood["extroot"]["once"]["a"] == "once"
+    assert cfood["extroot"]["once"]["v"] == "once"
+    assert cfood["extroot"]["once"]["macro_name"] == "once"
+    assert cfood["extroot"]["twice"]["z"] == "5"
+    assert cfood["extroot"]["twice"]["a"] == "5"
+    assert cfood["extroot"]["twice"]["v"] == "5"
+    assert cfood["extroot"]["twice"]["macro_name"] == "twice"
+    assert cfood["extroot"]["default_name"]["z"] == "default_name"
+    assert cfood["extroot"]["default_name"]["a"] == "default_name"
+    assert cfood["extroot"]["default_name"]["v"] == "default_name"
+    assert cfood["extroot"]["default_name"]["macro_name"] == "default_name"
diff --git a/unittests/test_table_converter.py b/unittests/test_table_converter.py
index 85255d3efd34dc666d5d2e97423f33177dea6732..abe4ac85ec4fc0a78e71c177222817e1b84e9e56 100644
--- a/unittests/test_table_converter.py
+++ b/unittests/test_table_converter.py
@@ -31,10 +31,8 @@ from caoscrawler.stores import GeneralStore
 from caoscrawler.converters import (ConverterValidationError,
                                     DictConverter, XLSXTableConverter, CSVTableConverter)
 from caoscrawler.structure_elements import Directory
-from caoscrawler.structure_elements import (File, DictTextElement,
-                                            DictListElement, DictElement,
-                                            DictBooleanElement, DictDictElement,
-                                            DictIntegerElement, DictFloatElement)
+from caoscrawler.structure_elements import (File, TextElement, ListElement, DictElement,
+                                            BooleanElement, IntegerElement, FloatElement)
 
 from os.path import join, dirname, basename
 
@@ -63,18 +61,17 @@ def converter_registry():
         "XLSXTableConverter": {
             "converter": "XLSXTableConverter",
             "package": "caoscrawler.converters"},
-
-        "DictDictElement": {
-            "converter": "DictDictElementConverter",
+        "DictElement": {
+            "converter": "DictElementConverter",
             "package": "caoscrawler.converters"},
-        "DictTextElement": {
-            "converter": "DictTextElementConverter",
+        "TextElement": {
+            "converter": "TextElementConverter",
             "package": "caoscrawler.converters"},
-        "DictIntegerElement": {
-            "converter": "DictIntegerElementConverter",
+        "IntegerElement": {
+            "converter": "IntegerElementConverter",
             "package": "caoscrawler.converters"},
-        "DictFloatElement": {
-            "converter": "DictFloatElementConverter",
+        "FloatElement": {
+            "converter": "FloatElementConverter",
             "package": "caoscrawler.converters"},
     }
 
diff --git a/unittests/test_tool.py b/unittests/test_tool.py
index a190efdeaaa9b3ede8d6fc1b9d1fb2d6e0d9c210..71180b17e22409bc2491a51d4cdd45ed6f4aa346 100755
--- a/unittests/test_tool.py
+++ b/unittests/test_tool.py
@@ -4,6 +4,7 @@
 # A. Schlemmer, 06/2021
 
 from caoscrawler.crawl import Crawler, SecurityMode
+from caoscrawler.identifiable import Identifiable
 from caoscrawler.structure_elements import File, DictTextElement, DictListElement
 from caoscrawler.identifiable_adapters import IdentifiableAdapter, LocalStorageIdentifiableAdapter
 from simulated_server_data import full_data
@@ -182,15 +183,6 @@ def test_record_structure_generation(crawler):
 #     ident.store_state(rfp("records.xml"))
 
 
-def test_ambigious_records(crawler, ident):
-    ident.get_records().clear()
-    ident.get_records().extend(crawler.target_data)
-    r = ident.get_records()
-    id_r0 = ident.get_identifiable(r[0])
-    with raises(RuntimeError, match=".*unambigiously.*"):
-        ident.retrieve_identified_record_for_identifiable(id_r0)
-
-
 def test_crawler_update_list(crawler, ident):
     # If the following assertions fail, that is a hint, that the test file records.xml has changed
     # and this needs to be updated:
@@ -206,7 +198,7 @@ def test_crawler_update_list(crawler, ident):
     ) == 2
 
     # The crawler contains lots of duplicates, because identifiables have not been resolved yet:
-    assert len(ident.get_records()) != len(crawler.target_data)
+    assert len(ident.get_records()) != len(crawler.crawled_data)
 
     # Check consistency:
     # Check whether identifiables retrieved from current identifiable store return
@@ -219,13 +211,12 @@ def test_crawler_update_list(crawler, ident):
             break
 
     id_r0 = ident.get_identifiable(r_cur)
-    assert r_cur.parents[0].name == id_r0.parents[0].name
+    assert r_cur.parents[0].name == id_r0.record_type
     assert r_cur.get_property(
-        "first_name").value == id_r0.get_property("first_name").value
+        "first_name").value == id_r0.properties["first_name"]
     assert r_cur.get_property(
-        "last_name").value == id_r0.get_property("last_name").value
+        "last_name").value == id_r0.properties["last_name"]
     assert len(r_cur.parents) == 1
-    assert len(id_r0.parents) == 1
     assert len(r_cur.properties) == 2
     assert len(id_r0.properties) == 2
 
@@ -240,14 +231,13 @@ def test_crawler_update_list(crawler, ident):
             break
 
     id_r1 = ident.get_identifiable(r_cur)
-    assert r_cur.parents[0].name == id_r1.parents[0].name
+    assert r_cur.parents[0].name == id_r1.record_type
     assert r_cur.get_property(
-        "identifier").value == id_r1.get_property("identifier").value
-    assert r_cur.get_property("date").value == id_r1.get_property("date").value
+        "identifier").value == id_r1.properties["identifier"]
+    assert r_cur.get_property("date").value == id_r1.properties["date"]
     assert r_cur.get_property(
-        "project").value == id_r1.get_property("project").value
+        "project").value == id_r1.properties["project"]
     assert len(r_cur.parents) == 1
-    assert len(id_r1.parents) == 1
     assert len(r_cur.properties) == 4
     assert len(id_r1.properties) == 3
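+    # Note: id_r0 and id_r1 are Identifiables, not Records, so identifying
+    # values are read from a plain dict. Conceptually (a sketch, not the
+    # actual class definition in caoscrawler.identifiable):
+    #
+    #     @dataclass
+    #     class Identifiable:
+    #         record_type: str
+    #         name: str = None
+    #         properties: dict = field(default_factory=dict)
+    #         backrefs: list = field(default_factory=list)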
 
@@ -262,21 +252,6 @@ def test_crawler_update_list(crawler, ident):
         "responsible").value == idr_r1.get_property("responsible").value
     assert r_cur.description == idr_r1.description
 
-    # test whether compare_entites function works in this context:
-    comp = compare_entities(r_cur, id_r1)
-    assert len(comp[0]["parents"]) == 0
-    assert len(comp[1]["parents"]) == 0
-    assert len(comp[0]["properties"]) == 1
-    assert len(comp[1]["properties"]) == 0
-    assert "responsible" in comp[0]["properties"]
-    assert "description" in comp[0]
-
-    comp = compare_entities(r_cur, idr_r1)
-    assert len(comp[0]["parents"]) == 0
-    assert len(comp[1]["parents"]) == 0
-    assert len(comp[0]["properties"]) == 0
-    assert len(comp[1]["properties"]) == 0
-
 
 def test_synchronization(crawler, ident):
     insl, updl = crawler.synchronize(commit_changes=False)
@@ -284,20 +259,12 @@ def test_synchronization(crawler, ident):
     assert len(updl) == 0
 
 
-def test_identifiable_adapter():
-    query = IdentifiableAdapter.create_query_for_identifiable(
-        db.Record().add_parent("Person")
-        .add_property("first_name", value="A")
-        .add_property("last_name", value="B"))
-    assert query.lower() == "find record person with 'first_name'='a' and 'last_name'='b' "
-
-
 def test_remove_unnecessary_updates():
     # test trivial case
     upl = [db.Record().add_parent("A")]
     irs = [db.Record().add_parent("A")]
-    Crawler.remove_unnecessary_updates(upl, irs)
-    assert len(upl) == 0
+    updates = Crawler.remove_unnecessary_updates(upl, irs)
+    assert len(updates) == 0
 
     # test property difference case
     # TODO: this should work, right?
@@ -309,24 +276,24 @@ def test_remove_unnecessary_updates():
     # test value difference case
     upl = [db.Record().add_parent("A").add_property("a", 5)]
     irs = [db.Record().add_parent("A").add_property("a")]
-    Crawler.remove_unnecessary_updates(upl, irs)
-    assert len(upl) == 1
+    updates = Crawler.remove_unnecessary_updates(upl, irs)
+    assert len(updates) == 1
     upl = [db.Record().add_parent("A").add_property("a", 5)]
     irs = [db.Record().add_parent("A").add_property("a", 5)]
-    Crawler.remove_unnecessary_updates(upl, irs)
-    assert len(upl) == 0
+    updates = Crawler.remove_unnecessary_updates(upl, irs)
+    assert len(updates) == 0
 
     # test unit difference case
     upl = [db.Record().add_parent("A").add_property("a", unit='cm')]
     irs = [db.Record().add_parent("A").add_property("a")]
-    Crawler.remove_unnecessary_updates(upl, irs)
-    assert len(upl) == 1
+    updates = Crawler.remove_unnecessary_updates(upl, irs)
+    assert len(updates) == 1
 
     # test None difference case
     upl = [db.Record().add_parent("A").add_property("a")]
     irs = [db.Record().add_parent("A").add_property("a", 5)]
-    Crawler.remove_unnecessary_updates(upl, irs)
-    assert len(upl) == 1
+    updates = Crawler.remove_unnecessary_updates(upl, irs)
+    assert len(updates) == 1
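+    # The pattern above reflects the changed contract: remove_unnecessary_updates
+    # returns the filtered list instead of mutating upl in place. A minimal
+    # sketch of such a pure filter (with a hypothetical is_empty_diff helper):
+    #
+    #     def remove_unnecessary_updates(updates, identified_records):
+    #         return [up for up, ir in zip(updates, identified_records)
+    #                 if not is_empty_diff(up, ir)]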
 
 
 # Current status:
@@ -339,7 +306,7 @@ def test_identifiable_adapter_no_identifiable(crawler, ident):
     insl, updl = crawler.synchronize()
     assert len(updl) == 0
 
-    pers = [r for r in crawler.target_data if r.parents[0].name == "Person"]
+    pers = [r for r in crawler.crawled_data if r.parents[0].name == "Person"]
     # All persons are inserted, because they are not identifiable:
     assert len(insl) == len(pers)
 
@@ -359,7 +326,11 @@ def test_provenance_debug_data(crawler):
     assert check_key_count("Person") == 14
 
 
-def basic_retrieve_by_name_mock_up(rec, known):
+def test_split_into_inserts_and_updates_trivial(crawler):
+    crawler.split_into_inserts_and_updates([])
+
+
+def basic_retrieve_by_name_mock_up(rec, referencing_entities=None, known=None):
     """ returns a stored Record if rec.name is an existing key, None otherwise """
     if rec.name in known:
         return known[rec.name]
@@ -377,27 +348,26 @@ def crawler_mocked_identifiable_retrieve(crawler):
     # There is only a single known Record with name A
     crawler.identifiableAdapter.retrieve_identified_record_for_record = Mock(side_effect=partial(
         basic_retrieve_by_name_mock_up, known={"A": db.Record(id=1111, name="A")}))
+    crawler.identifiableAdapter.retrieve_identified_record_for_identifiable = Mock(
+        side_effect=partial(
+            basic_retrieve_by_name_mock_up, known={"A": db.Record(id=1111, name="A")}))
     return crawler
 
 
-def test_split_into_inserts_and_updates_trivial(crawler):
-    # Try trivial argument
-    crawler.split_into_inserts_and_updates([])
-
-
 def test_split_into_inserts_and_updates_single(crawler_mocked_identifiable_retrieve):
     crawler = crawler_mocked_identifiable_retrieve
+    identlist = [Identifiable(name="A", record_type="C"), Identifiable(name="B", record_type="C")]
     entlist = [db.Record(name="A").add_parent(
         "C"), db.Record(name="B").add_parent("C")]
 
-    assert crawler.get_identified_record_from_local_cache(entlist[0]) is None
-    assert crawler.get_identified_record_from_local_cache(entlist[1]) is None
-    assert crawler.can_be_checked_externally(entlist[0])
-    assert crawler.can_be_checked_externally(entlist[1])
+    assert crawler.get_from_any_cache(identlist[0]) is None
+    assert crawler.get_from_any_cache(identlist[1]) is None
+    assert not crawler._has_reference_value_without_id(identlist[0])
+    assert not crawler._has_reference_value_without_id(identlist[1])
     assert crawler.identifiableAdapter.retrieve_identified_record_for_record(
-        entlist[0]).id == 1111
+        identlist[0]).id == 1111
     assert crawler.identifiableAdapter.retrieve_identified_record_for_record(
-        entlist[1]) is None
+        identlist[1]) is None
 
     insert, update = crawler.split_into_inserts_and_updates(deepcopy(entlist))
     assert len(insert) == 1
@@ -406,7 +376,7 @@ def test_split_into_inserts_and_updates_single(crawler_mocked_identifiable_retri
     assert update[0].name == "A"
     # if this ever fails, the mock up may be removed
     crawler.identifiableAdapter.get_registered_identifiable.assert_called()
-    crawler.identifiableAdapter.retrieve_identified_record_for_record.assert_called()
+    crawler.identifiableAdapter.retrieve_identified_record_for_identifiable.assert_called()
 
 
 def test_split_into_inserts_and_updates_with_duplicate(crawler_mocked_identifiable_retrieve):
@@ -424,7 +394,7 @@ def test_split_into_inserts_and_updates_with_duplicate(crawler_mocked_identifiab
     assert update[0].name == "A"
     # if this ever fails, the mock up may be removed
     crawler.identifiableAdapter.get_registered_identifiable.assert_called()
-    crawler.identifiableAdapter.retrieve_identified_record_for_record.assert_called()
+    crawler.identifiableAdapter.retrieve_identified_record_for_identifiable.assert_called()
 
 
 def test_split_into_inserts_and_updates_with_ref(crawler_mocked_identifiable_retrieve):
@@ -440,8 +410,8 @@ def test_split_into_inserts_and_updates_with_ref(crawler_mocked_identifiable_ret
     assert len(update) == 1
     assert update[0].name == "A"
     # if this ever fails, the mock up may be removed
-    crawler.identifiableAdapter.retrieve_identified_record_for_record.assert_called()
     crawler.identifiableAdapter.get_registered_identifiable.assert_called()
+    crawler.identifiableAdapter.retrieve_identified_record_for_identifiable.assert_called()
 
 
 def test_split_into_inserts_and_updates_with_circ(crawler):
@@ -476,7 +446,7 @@ def test_split_into_inserts_and_updates_with_complex(crawler_mocked_identifiable
     assert update[0].name == "A"
     # if this ever fails, the mock up may be removed
     crawler.identifiableAdapter.get_registered_identifiable.assert_called()
-    crawler.identifiableAdapter.retrieve_identified_record_for_record.assert_called()
+    crawler.identifiableAdapter.retrieve_identified_record_for_identifiable.assert_called()
 
     # TODO: write a test where the unresolved entity is not part of the identifiable
 
@@ -495,48 +465,70 @@ def test_split_into_inserts_and_updates_with_copy_attr(crawler_mocked_identifiab
     assert update[0].get_property("foo").value == 1
     # if this ever fails, the mock up may be removed
     crawler.identifiableAdapter.get_registered_identifiable.assert_called()
-    crawler.identifiableAdapter.retrieve_identified_record_for_record.assert_called()
+    crawler.identifiableAdapter.retrieve_identified_record_for_identifiable.assert_called()
 
 
-def test_all_references_are_existing_already(crawler):
+def test_has_missing_object_in_references(crawler):
     # Simulate remote server content by using the names to identify records
     # There are only two known Records with name A and B
     crawler.identifiableAdapter.get_registered_identifiable = Mock(side_effect=partial(
-        basic_retrieve_by_name_mock_up, known={"A": db.Record(name="A").add_parent("C"),
-                                               "B": db.Record(name="B").add_parent("C")}))
-
-    assert crawler.all_references_are_existing_already(
-        db.Record().add_property('a', 123))
-    assert crawler.all_references_are_existing_already(db.Record()
-                                                       .add_property('a', db.Record(id=123)))
-    assert crawler.all_references_are_existing_already(db.Record()
-                                                       .add_property('a', 123)
-                                                       .add_property('b', db.Record(id=123)))
-    assert not crawler.all_references_are_existing_already(db.Record()
-                                                           .add_property('a', 123)
-                                                           .add_property('b', db.Record(name="A")
-                                                                         .add_parent("C")))
-    a = db.Record(name="A").add_parent("C")
-    crawler.add_identified_record_to_local_cache(a)
-    assert crawler.all_references_are_existing_already(db.Record()
-                                                       .add_property('a', 123)
-                                                       .add_property('b', a))
+        basic_retrieve_by_name_mock_up, known={"C": db.Record(name="C").add_parent("RTC")
+                                               .add_property("d"),
+                                               "D": db.Record(name="D").add_parent("RTD")
+                                               .add_property("d").add_property("e"),
+                                               }))
+
+    # one reference given by id -> nothing is missing
+    assert not crawler._has_missing_object_in_references(
+        Identifiable(name="C", record_type="RTC", properties={'d': 123}), [])
+    # one reference given as an Entity with id -> nothing is missing
+    assert not crawler._has_missing_object_in_references(
+        Identifiable(name="C", record_type="RTC", properties={'d': db.Record(id=123)
+                                                              .add_parent("C")}), [])
+    # mixed: one reference by id, one as an Entity with id -> nothing is missing
+    assert not crawler._has_missing_object_in_references(
+        Identifiable(name="C", record_type="RTD",
+                     properties={'d': 123, 'b': db.Record(id=123).add_parent("RTC")}), [])
+    # entity to be referenced in the following
+    a = db.Record(name="C").add_parent("C").add_property("d", 12311)
+    # one reference by id, one to an Entity without id that is not part of
+    # the registered identifiable -> nothing is missing
+    assert not crawler._has_missing_object_in_references(
+        Identifiable(name="C", record_type="RTC", properties={'d': 123, 'e': a}), [])
+
+    # mixed: one reference by id, one to an Entity without id that is not
+    # yet known to be missing -> nothing is missing
+    assert not crawler._has_missing_object_in_references(
+        Identifiable(name="D", record_type="RTD", properties={'d': 123, 'e': a}), [])
+
+    crawler.add_to_remote_missing_cache(a, Identifiable(name="C", record_type="RTC",
+                                                        properties={'d': 12311}))
+    # the referenced Entity without id is now in the remote-missing cache
+    # -> a missing object is detected
+    assert crawler._has_missing_object_in_references(
+        Identifiable(name="D", record_type="RTD", properties={'d': 123, 'e': a}), [])
+
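+    # Summary of the semantics exercised above: _has_missing_object_in_references
+    # only returns True once a referenced Entity without id is known to be
+    # missing remotely, i.e. after it has been put into the remote-missing cache.
+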
     # if this ever fails, the mock up may be removed
     crawler.identifiableAdapter.get_registered_identifiable.assert_called()
 
 
-def test_can_be_checked_externally(crawler):
-    assert crawler.can_be_checked_externally(
-        db.Record().add_property('a', 123))
-    assert crawler.can_be_checked_externally(db.Record()
-                                             .add_property('a', db.Record(id=123)))
-    assert crawler.can_be_checked_externally(db.Record()
-                                             .add_property('a', 123)
-                                             .add_property('b', db.Record(id=123)))
-
-    assert not crawler.can_be_checked_externally(db.Record()
-                                                 .add_property('a', 123)
-                                                 .add_property('b', db.Record()))
+@pytest.mark.xfail()
+def test_references_entities_without_ids(crawler, ident):
+    assert not crawler._has_reference_value_without_id(db.Record().add_parent("Person")
+                                                       .add_property('last_name', 123)
+                                                       .add_property('first_name', 123))
+    # id and rec with id
+    assert not crawler._has_reference_value_without_id(db.Record().add_parent("Person")
+                                                       .add_property('first_name', 123)
+                                                       .add_property('last_name',
+                                                                     db.Record(id=123)))
+    # id and rec with id and one unneeded prop
+    assert crawler._has_reference_value_without_id(db.Record().add_parent("Person")
+                                                   .add_property('first_name', 123)
+                                                   .add_property('stuff', db.Record())
+                                                   .add_property('last_name', db.Record(id=123)))
+
+    # one identifying prop is missing
+    assert crawler._has_reference_value_without_id(db.Record().add_parent("Person")
+                                                   .add_property('first_name', 123)
+                                                   .add_property('last_name', db.Record()))
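+    # Note: this test is marked xfail, presumably because
+    # _has_reference_value_without_id now operates on Identifiables while the
+    # arguments here are still plain Records.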
 
 
 def test_replace_entities_with_ids(crawler):
@@ -582,24 +574,37 @@ def reset_mocks(mocks):
 
 
 def change_identifiable_prop(ident):
-    # the checks in here are only to make sure we change the record as we intend to
-    meas = ident._records[-2]
-    assert meas.parents[0].name == "Measurement"
-    resps = meas.properties[0]
-    assert resps.name == "date"
-    # change one element; This changes the date which is part of the identifiable
-    resps.value = "2022-01-04"
+    """
+    This function is supposed to change a non identifiing property.
+    """
+    for ent in ident._records:
+        if len(ent.parents) == 0 or ent.parents[0].name != "Measurement":
+            continue
+        for prop in ent.properties:
+            if prop.name != "date":
+                continue
+            # change one element; this changes the date, which is part of the identifiable
+            prop.value = "2022-01-04"
+            return
+    # If no such property is found, the test fixture is not set up as expected.
+    raise RuntimeError("Did not find the property that should be changed.")
 
 
 def change_non_identifiable_prop(ident):
-    # the checks in here are only to make sure we change the record as we intend to
-    meas = ident._records[-1]
-    assert meas.parents[0].name == "Measurement"
-    resps = meas.properties[-1]
-    assert resps.name == "responsible"
-    assert len(resps.value) == 2
-    # change one element; This removes a responsible which is not part of the identifiable
-    del resps.value[-1]
+    """
+    This function is supposed to change a non identifiing property.
+    """
+    for ent in ident._records:
+        if len(ent.parents) == 0 or ent.parents[0].name != "Measurement":
+            continue
+
+        for prop in ent.properties:
+            if prop.name != "responsible" or len(prop.value) < 2:
+                continue
+            # change one element; this removes a responsible, which is not part of the identifiable
+            del prop.value[-1]
+            return
+    raise RuntimeError("Did not find the property that should be changed.")
 
 
 @patch("caoscrawler.crawl.Crawler._get_entity_by_id",
@@ -691,3 +696,119 @@ def test_security_mode(updateCacheMock, upmock, insmock, ident):
     reset_mocks([updateCacheMock, insmock, upmock])
     # restore original ident
     ident._records = deepcopy(records_backup)
+
+
+def test_create_reference_mapping():
+    a = db.Record().add_parent("A")
+    b = db.Record().add_parent("B").add_property('a', a)
+    ref = Crawler.create_reference_mapping([a, b])
+    assert id(a) in ref
+    assert id(b) not in ref
+    assert "B" in ref[id(a)]
+    assert ref[id(a)]["B"] == [b]
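+    # The mapping has the shape {id(referenced): {parent_name: [referencing]}}.
+    # A naive construction (a sketch under the Entity API used in these tests,
+    # not the actual implementation):
+    #
+    #     mapping = {}
+    #     for ent in flat:
+    #         for prop in ent.properties:
+    #             if isinstance(prop.value, db.Entity):
+    #                 mapping.setdefault(id(prop.value), {}).setdefault(
+    #                     ent.parents[0].name, []).append(ent)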
+
+
+def test_create_flat_list():
+    a = db.Record()
+    a.add_property(name="a", value=a)
+    Crawler.create_flat_list([a], [])
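+    # create_flat_list must terminate on such self-references. A cycle-safe
+    # sketch (hypothetical, using identity checks as the visited marker):
+    #
+    #     def flatten(entities, flat):
+    #         for ent in entities:
+    #             if any(ent is f for f in flat):
+    #                 continue
+    #             flat.append(ent)
+    #             flatten([p.value for p in ent.properties
+    #                      if isinstance(p.value, db.Entity)], flat)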
+
+
+@pytest.fixture
+def crawler_mocked_for_backref_test(crawler):
+    # mock the retrieval of registered identifiables: return a Record with just a parent
+    def get_reg_ident(x):
+        if x.parents[0].name == "C":
+            return db.Record().add_parent(x.parents[0].name).add_property(
+                "is_referenced_by", value=["BR"])
+        elif x.parents[0].name == "D":
+            return db.Record().add_parent(x.parents[0].name).add_property(
+                "is_referenced_by", value=["BR", "BR2"])
+        else:
+            return db.Record().add_parent(x.parents[0].name)
+    crawler.identifiableAdapter.get_registered_identifiable = Mock(side_effect=get_reg_ident)
+
+    # Simulate remote server content by using the names to identify records
+    # There is only a single known Record with name A
+    crawler.identifiableAdapter.retrieve_identified_record_for_record = Mock(side_effect=partial(
+        basic_retrieve_by_name_mock_up, known={"A":
+                                               db.Record(id=1111, name="A").add_parent("BR")}))
+    crawler.identifiableAdapter.retrieve_identified_record_for_identifiable = Mock(
+        side_effect=partial(
+            basic_retrieve_by_name_mock_up, known={"A":
+                                                   db.Record(id=1111, name="A").add_parent("BR")}))
+    return crawler
+
+
+def test_split_into_inserts_and_updates_backref(crawler_mocked_for_backref_test):
+    crawler = crawler_mocked_for_backref_test
+    identlist = [Identifiable(name="A", record_type="BR"),
+                 Identifiable(name="B", record_type="C", backrefs=[db.Entity()])]
+    referenced = db.Record(name="B").add_parent("C")
+    entlist = [referenced, db.Record(name="A").add_parent("BR").add_property("ref", referenced), ]
+
+    # Test without referencing object
+    # currently a NotImplementedError is raised if necessary properties are missing.
+    with raises(NotImplementedError):
+        crawler.split_into_inserts_and_updates([db.Record(name="B").add_parent("C")])
+
+    # identifiables were not yet checked
+    assert crawler.get_from_any_cache(identlist[0]) is None
+    assert crawler.get_from_any_cache(identlist[1]) is None
+    # one with reference, one without
+    assert not crawler._has_reference_value_without_id(identlist[0])
+    assert crawler._has_reference_value_without_id(identlist[1])
+    # one can be found remotely, one not
+    assert crawler.identifiableAdapter.retrieve_identified_record_for_record(
+        identlist[0]).id == 1111
+    assert crawler.identifiableAdapter.retrieve_identified_record_for_record(
+        identlist[1]) is None
+
+    # check the split...
+    insert, update = crawler.split_into_inserts_and_updates(deepcopy(entlist))
+    # A was found remotely and is therefore in the update list
+    assert len(update) == 1
+    assert update[0].name == "A"
+    # B does not exist on the (simulated) remote server
+    assert len(insert) == 1
+    assert insert[0].name == "B"
+
+
+def test_split_into_inserts_and_updates_mult_backref(crawler_mocked_for_backref_test):
+    # test whether multiple references of the same record type are correctly used
+    crawler = crawler_mocked_for_backref_test
+    referenced = db.Record(name="B").add_parent("C")
+    entlist = [referenced,
+               db.Record(name="A").add_parent("BR").add_property("ref", referenced),
+               db.Record(name="C").add_parent("BR").add_property("ref", referenced),
+               ]
+
+    # test whether both entities are listed in the backref attribute of the identifiable
+    referencing_entities = crawler.create_reference_mapping(entlist)
+    identifiable = crawler.identifiableAdapter.get_identifiable(referenced, referencing_entities)
+    assert len(identifiable.backrefs) == 2
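+    # i.e. both referencing records (each with parent "BR") ended up in
+    # backrefs, since the registered identifiable of record type "C" lists
+    # "BR" under "is_referenced_by".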
+
+    # check the split...
+    insert, update = crawler.split_into_inserts_and_updates(deepcopy(entlist))
+    assert len(update) == 1
+    assert len(insert) == 2
+
+
+def test_split_into_inserts_and_updates_diff_backref(crawler_mocked_for_backref_test):
+    # test whether multiple references from different record types are correctly used
+    crawler = crawler_mocked_for_backref_test
+    referenced = db.Record(name="B").add_parent("D")
+    entlist = [referenced,
+               db.Record(name="A").add_parent("BR").add_property("ref", referenced),
+               db.Record(name="A").add_parent("BR2").add_property("ref", referenced),
+               ]
+
+    # test whether both entities are listed in the backref attribute of the identifiable
+    referencing_entities = crawler.create_reference_mapping(entlist)
+    identifiable = crawler.identifiableAdapter.get_identifiable(referenced, referencing_entities)
+    assert len(identifiable.backrefs) == 2
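+    # here the two referencing records have different parents, "BR" and "BR2";
+    # both are listed under "is_referenced_by" for record type "D", so both
+    # appear in backrefs.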
+
+    # check the split...
+    insert, update = crawler.split_into_inserts_and_updates(deepcopy(entlist))
+    assert len(update) == 2
+    assert len(insert) == 1
diff --git a/unittests/test_variable_substitutions.py b/unittests/test_variable_substitutions.py
index 203197b7f8af51605a413ac354a0426d61c9c0cb..f6c3b6375a3111faff9d746779805ba16af260b7 100644
--- a/unittests/test_variable_substitutions.py
+++ b/unittests/test_variable_substitutions.py
@@ -83,3 +83,18 @@ def test_substitutions_parents(crawler_2):
     assert len(parents) == 2
     assert parents[0].name == "Experiment"
     assert parents[1].name == "Month_05"
+
+
+def test_empty_parents(crawler_2):
+    # This is a test for:
+    # https://gitlab.com/caosdb/caosdb-crawler/-/issues/8
+
+    subd = crawler_2.debug_tree[dircheckstr(
+        "File", "ExperimentalData", "220512_data.dat")]
+
+    parents = subd[1]["RecordWithoutParents"].get_parents()
+    assert len(parents) == 0
+
+    parents = subd[1]["RecordThatGetsParentsLater"].get_parents()
+    assert len(parents) == 1
+    assert parents[0].name == "Month_05"