diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 80f38616e1abbe4e560d9bbfbd2542d02bfe5007..d2abdcd653c3315335c29058a8ca2774dad34577 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -131,7 +131,7 @@ unittest_py39:
   script:
     # First verify that system Python actually is 3.9
     - python3 -c "import sys; assert sys.version.startswith('3.9')"
-    - python3 -c "import caosdb; print('CaosDB Version:', caosdb.__version__)"
+    - python3 -c "import linkahead; print('LinkAhead Version:', linkahead.__version__)"
     - tox
 
 unittest_py37:
@@ -161,7 +161,6 @@ unittest_py311:
   stage: unittest
   image: python:3.11
   script: *python_test_script
-  allow_failure: true
 
 # Build the sphinx documentation and make it ready for deployment by Gitlab Pages
 # Special job for serving a static website. See https://docs.gitlab.com/ee/ci/yaml/README.html#pages
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4ef346c32bb278bc91f0c5e65b13ce58b5f67a8f..108c0cd02724a9f38d8ebe5b54ec07061879a094 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -12,6 +12,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 * Added member `BaseTableExporter.all_keys`
 * Parsing from YAML now allows to give an existing model to which the YAML data model shall be
   added.
+* The `json_schema_exporter` module, which provides tools to create a json
+  schema from a RecordType, e.g., for use in web forms.
 
 ### Changed ###
 
diff --git a/integrationtests/test.sh b/integrationtests/test.sh
index 9f8d003c9219f7e243fd50c5d846b9a7450b9c7b..07293254de9c09cc8ace8cd6b2f3f426464ccd62 100755
--- a/integrationtests/test.sh
+++ b/integrationtests/test.sh
@@ -16,7 +16,7 @@ fi
 OUT=/tmp/crawler.output
 ls 
 cat pycaosdb.ini
-python3 -c "import caosdb; print('CaosDB Version:', caosdb.__version__)"
+python3 -c "import linkahead; print('LinkAhead Version:', linkahead.__version__)"
 rm -rf /tmp/caosdb_identifiable_cache.db
 set -e
 echo "Clearing database"
diff --git a/integrationtests/test_cache.py b/integrationtests/test_cache.py
index da1824e8c3cdf3f68bb82f8c7f39e6eecb111f92..aacef1792e6028bf056093c517f45f6367f471d6 100644
--- a/integrationtests/test_cache.py
+++ b/integrationtests/test_cache.py
@@ -63,6 +63,12 @@ class CacheTest(unittest.TestCase):
 
         update = UpdateCache(db_file=self.cache)
         run_id = "a"
+        print(db.execute_query("FIND Record TestRecord", unique=True))
+        print(db.execute_query("FIND entity with id="+str(rec.id), unique=True))
+        try:
+            print(db.execute_query("FIND Record "+str(rec.id), unique=True))
+        except BaseException:
+            print("Query does not work as expected")
         update.insert(cont, run_id)
         assert len(update.get_updates(run_id)) == 1
 
diff --git a/setup.py b/setup.py
index 6fb337363ed7e3500c4f9b7c7fa64ed35135f9fb..f71fd2616bc61ef1b8f1b36048ee5380f215211d 100755
--- a/setup.py
+++ b/setup.py
@@ -155,9 +155,9 @@ def setup_package():
         author='Henrik tom Wörden',
         author_email='h.tomwoerden@indiscale.com',
         python_requires='>=3.7',
-        install_requires=["caosdb>=0.11.0",
+        install_requires=["linkahead>=0.13.1",
                           "jsonref",
-                          "jsonschema>=4.4.0",
+                          "jsonschema[format]>=4.4.0",
                           "numpy>=1.17.3",
                           "openpyxl>=3.0.7",
                           "pandas>=1.2.0",
diff --git a/src/caosadvancedtools/bloxberg/swagger_client/__init__.py b/src/caosadvancedtools/bloxberg/swagger_client/__init__.py
index 136c5b27a37cfbd9135230468ae5a29cb0eb2b77..255d6d3124dc352f10366e22f1eb8b461ff6593d 100644
--- a/src/caosadvancedtools/bloxberg/swagger_client/__init__.py
+++ b/src/caosadvancedtools/bloxberg/swagger_client/__init__.py
@@ -13,22 +13,23 @@
 """
 
 from __future__ import absolute_import
 
 # Fake the installation
-import sys, pathlib
+import sys
+import pathlib
 __this_dir = str(pathlib.Path(__file__).parent.parent)
 if __this_dir not in sys.path:
     sys.path.append(__this_dir)
 
 # import apis into sdk package
 from swagger_client.api.certificate_api import CertificateApi
 from swagger_client.api.pdf_api import PdfApi
 # import ApiClient
 from swagger_client.api_client import ApiClient
 from swagger_client.configuration import Configuration
 # import models into sdk package
 from swagger_client.models.batch import Batch
 from swagger_client.models.controller_cert_tools_generate_pdf_json_certificate import ControllerCertToolsGeneratePdfJsonCertificate
 from swagger_client.models.controller_cert_tools_generate_unsigned_certificate_json_certificate import ControllerCertToolsGenerateUnsignedCertificateJsonCertificate
 from swagger_client.models.http_validation_error import HTTPValidationError
 from swagger_client.models.validation_error import ValidationError
diff --git a/src/caosadvancedtools/bloxberg/swagger_client/api_client.py b/src/caosadvancedtools/bloxberg/swagger_client/api_client.py
index 25e6501a4e36b09bca266f2eb375807053a58870..7337ca334c545b2c2502a20cb5369db331149037 100644
--- a/src/caosadvancedtools/bloxberg/swagger_client/api_client.py
+++ b/src/caosadvancedtools/bloxberg/swagger_client/api_client.py
@@ -591,7 +591,7 @@ class ApiClient(object):
             )
 
     def __hasattr(self, object, name):
-            return name in object.__class__.__dict__
+        return name in object.__class__.__dict__
 
     def __deserialize_model(self, data, klass):
         """Deserializes list or dict to model.
diff --git a/src/caosadvancedtools/bloxberg/swagger_client/models/batch.py b/src/caosadvancedtools/bloxberg/swagger_client/models/batch.py
index 7a347cf7ac9148df8ec9a43200f4058f127447b9..474ca01a69a6a06c93b7e9a640695fa709890997 100644
--- a/src/caosadvancedtools/bloxberg/swagger_client/models/batch.py
+++ b/src/caosadvancedtools/bloxberg/swagger_client/models/batch.py
@@ -15,6 +15,7 @@ import re  # noqa: F401
 
 import six
 
+
 class Batch(object):
     """NOTE: This class is auto generated by the swagger code generator program.
 
diff --git a/src/caosadvancedtools/bloxberg/swagger_client/models/controller_cert_tools_generate_pdf_json_certificate.py b/src/caosadvancedtools/bloxberg/swagger_client/models/controller_cert_tools_generate_pdf_json_certificate.py
index 2d7fd2d763ba40c9a384203301aa3e70efdf7783..8c1b50d8816b09c1a466cf7d11cee1ca605dfd3a 100644
--- a/src/caosadvancedtools/bloxberg/swagger_client/models/controller_cert_tools_generate_pdf_json_certificate.py
+++ b/src/caosadvancedtools/bloxberg/swagger_client/models/controller_cert_tools_generate_pdf_json_certificate.py
@@ -15,6 +15,7 @@ import re  # noqa: F401
 
 import six
 
+
 class ControllerCertToolsGeneratePdfJsonCertificate(object):
     """NOTE: This class is auto generated by the swagger code generator program.
 
diff --git a/src/caosadvancedtools/bloxberg/swagger_client/models/controller_cert_tools_generate_unsigned_certificate_json_certificate.py b/src/caosadvancedtools/bloxberg/swagger_client/models/controller_cert_tools_generate_unsigned_certificate_json_certificate.py
index 4a6d2d3f0e15faa8672f001e964d66c6e0a27780..fa0da3cb0c09e384cdddbd4ce458a4baf14f4b5d 100644
--- a/src/caosadvancedtools/bloxberg/swagger_client/models/controller_cert_tools_generate_unsigned_certificate_json_certificate.py
+++ b/src/caosadvancedtools/bloxberg/swagger_client/models/controller_cert_tools_generate_unsigned_certificate_json_certificate.py
@@ -15,6 +15,7 @@ import re  # noqa: F401
 
 import six
 
+
 class ControllerCertToolsGenerateUnsignedCertificateJsonCertificate(object):
     """NOTE: This class is auto generated by the swagger code generator program.
 
diff --git a/src/caosadvancedtools/bloxberg/swagger_client/models/http_validation_error.py b/src/caosadvancedtools/bloxberg/swagger_client/models/http_validation_error.py
index 21c9e467311c596499f3f408c5ac670b5852c6fa..67c23fba87467a7888bff82fc7f11e9d90e15f15 100644
--- a/src/caosadvancedtools/bloxberg/swagger_client/models/http_validation_error.py
+++ b/src/caosadvancedtools/bloxberg/swagger_client/models/http_validation_error.py
@@ -15,6 +15,7 @@ import re  # noqa: F401
 
 import six
 
+
 class HTTPValidationError(object):
     """NOTE: This class is auto generated by the swagger code generator program.
 
diff --git a/src/caosadvancedtools/bloxberg/swagger_client/models/validation_error.py b/src/caosadvancedtools/bloxberg/swagger_client/models/validation_error.py
index 7ae6bf0900449ff3612798a4503692c4e38e1c11..96d1e23734698efbdad8423c33012473e9aac03b 100644
--- a/src/caosadvancedtools/bloxberg/swagger_client/models/validation_error.py
+++ b/src/caosadvancedtools/bloxberg/swagger_client/models/validation_error.py
@@ -15,6 +15,7 @@ import re  # noqa: F401
 
 import six
 
+
 class ValidationError(object):
     """NOTE: This class is auto generated by the swagger code generator program.
 
diff --git a/src/caosadvancedtools/cache.py b/src/caosadvancedtools/cache.py
index 2b79f9ae7eedaf6e7d6896450a8e7b14e1dc9b30..cf74e330d3efb754d8e79d84ba816877c295c784 100644
--- a/src/caosadvancedtools/cache.py
+++ b/src/caosadvancedtools/cache.py
@@ -27,16 +27,15 @@
 # something to replace this.
 import os
 import sqlite3
-from copy import deepcopy
+import tempfile
+import warnings
 from abc import ABC, abstractmethod
+from copy import deepcopy
 from hashlib import sha256
-import warnings
 
 import caosdb as db
 from lxml import etree
 
-import tempfile
-
 
 def put_in_container(stuff):
     if isinstance(stuff, list):
@@ -344,7 +343,7 @@ class UpdateCache(AbstractCache):
         old_ones = db.Container()
 
         for ent in cont:
-            old_ones.append(db.execute_query("FIND ENTITY {}".format(ent.id),
+            old_ones.append(db.execute_query("FIND ENTITY WITH ID={}".format(ent.id),
                                              unique=True))
 
         return old_ones
diff --git a/src/caosadvancedtools/json_schema_exporter.py b/src/caosadvancedtools/json_schema_exporter.py
new file mode 100644
index 0000000000000000000000000000000000000000..d34bed9c0189789eb384c303f8311abb38be7324
--- /dev/null
+++ b/src/caosadvancedtools/json_schema_exporter.py
@@ -0,0 +1,202 @@
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2023 Indiscale GmbH <info@indiscale.com>
+# Copyright (C) 2023 Florian Spreckelsen <f.spreckelsen@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Affero General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or (at your option) any
+# later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Affero General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+
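+"""Utilities to create a json schema from LinkAhead RecordTypes.
+
+The main entry point is `recordtype_to_json_schema`, which converts a given
+RecordType into a json schema dict, e.g., for use in web forms.
+"""
+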
+import re
+from typing import Optional
+
+import linkahead as db
+from linkahead.common.datatype import get_list_datatype, is_list_datatype
+
+
+def _make_required_list(rt: db.RecordType):
+    """Return the list of names of properties with importance db.OBLIGATORY."""
+    return [prop.name for prop in rt.properties
+            if rt.get_importance(prop.name) == db.OBLIGATORY]
+
+
+def _make_prop_from_prop(prop: db.Property, additional_options_for_text_props: Optional[dict],
+                         units_in_description: bool):
+    """Return the JSON Schema segment for the given property
+
+    Parameters
+    ----------
+    prop : db.Property
+        the property to be transformed
+    additional_options_for_text_props : Optional[dict]
+        dict of dicts, mapping property names to dicts which may contain the
+        keys 'pattern' and 'format' to further specify the JSON Schema segment
+        for that property
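+
+        A sketch of the expected structure (the property names used here are
+        purely illustrative)::
+
+            {"JustDateNoTime": {"format": "date"},
+             "SerialNumber": {"pattern": "[A-Z]{3}[0-9]+"}}
+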
+    units_in_description : bool
+        Whether to store the unit of a LinkAhead property in the description of
+        the corresponding json schema item or to create a separate `unit` key
+        instead.
+    """
+
+    if prop.is_reference():
+        raise NotImplementedError(
+            "Reference properties are not supported in this version of the json schema exporter."
+        )
+
+    if not additional_options_for_text_props:
+        additional_options_for_text_props = {}
+    if prop.datatype == db.TEXT or prop.datatype == db.DATETIME:
+        text_format = None
+        text_pattern = None
+        if prop.name in additional_options_for_text_props:
+            if "pattern" in additional_options_for_text_props[prop.name]:
+                text_pattern = additional_options_for_text_props[prop.name]["pattern"]
+            if "format" in additional_options_for_text_props[prop.name]:
+                text_format = additional_options_for_text_props[prop.name]["format"]
+            elif prop.datatype == db.DATETIME:
+                # Set the date or datetime format if only a pattern is given ...
+                text_format = ["date", "date-time"]
+        elif prop.datatype == db.DATETIME:
+            # ... again, for those props that don't appear in the additional
+            # options list.
+            text_format = ["date", "date-time"]
+
+        return _make_text_property(prop.description, text_format, text_pattern)
+
+    json_prop = {}
+    if prop.description:
+        json_prop["description"] = prop.description
+    if units_in_description and prop.unit:
+        if "description" in json_prop:
+            json_prop["description"] += f" Unit is {prop.unit}."
+        else:
+            json_prop["description"] = f"Unit is {prop.unit}."
+    elif prop.unit:
+        json_prop["unit"] = prop.unit
+
+    if prop.datatype == db.BOOLEAN:
+        json_prop["type"] = "boolean"
+    elif prop.datatype == db.INTEGER:
+        json_prop["type"] = "integer"
+    elif prop.datatype == db.DOUBLE:
+        json_prop["type"] = "number"
+    elif is_list_datatype(prop.datatype):
+        json_prop["type"] = "array"
+        list_element_prop = db.Property(
+            name=prop.name, datatype=get_list_datatype(prop.datatype, strict=True))
+        json_prop["items"] = _make_prop_from_prop(
+            list_element_prop, additional_options_for_text_props, units_in_description)
+    else:
+        raise ValueError(
+            f"Unknown or no property datatype. Property {prop.name} with type {prop.datatype}")
+
+    return json_prop
+
+
+def _make_text_property(description="", text_format=None, text_pattern=None):
+
+    prop = {
+        "type": "string"
+    }
+    if description:
+        prop["description"] = description
+    if text_format is not None:
+        if isinstance(text_format, list):
+            # We want the type inside the options, not in the head:
+            # "datetime property": {
+            #   "anyOf": [
+            #     {
+            #       "type": "string",
+            #       "format": "date"
+            #     },
+            #     {
+            #       "type": "string",
+            #       "format": "date-time"
+            #     }]}
+            prop.pop("type")
+            prop["anyOf"] = [{"type": "string", "format": tf} for tf in text_format]
+        else:
+            prop["format"] = text_format
+    if text_pattern is not None:
+        prop["pattern"] = text_pattern
+
+    return prop
+
+
+def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = True,
+                              name_and_description_in_properties: bool = False,
+                              additional_options_for_text_props: Optional[dict] = None,
+                              units_in_description: bool = True):
+    """Create a jsonschema from a given RecordType that can be used, e.g., to
+    validate a json specifying a record of the given type.
+
+    Parameters
+    ----------
+    rt : RecordType
+        The RecordType from which a json schema will be created.
+    additional_properties : bool, optional
+        Whether additional properties will be admitted in the resulting
+        schema. Optional, default is True.
+    name_and_description_in_properties : bool, optional
+        Whether to include name and description in the `properties` section of
+        the schema to be exported. Optional, default is False.
+    additional_options_for_text_props : dict, optional
+        Dictionary containing additional "pattern" or "format" options for
+        string-typed properties. Optional, default is empty.
+    units_in_description : bool, optional
+        Whether to add the unit of a LinkAhead property (if it has any) to the
+        description of the corresponding schema entry. If set to False, a
+        separate `unit` key is added to the entry instead, which is purely
+        informational and ignored, e.g., during validation. Default is True.
+
+    Returns
+    -------
+    schema : dict
+        A dict containing the json schema created from the given RecordType's properties.
+
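+    Examples
+    --------
+    A minimal sketch of how this function might be used; the RecordType and
+    property names below are purely illustrative:
+
+    >>> import linkahead as db
+    >>> rt = db.RecordType(name="Experiment", description="A simple experiment")
+    >>> rt.add_property(name="date", datatype=db.DATETIME, importance=db.OBLIGATORY)
+    >>> rt.add_property(name="comment", datatype=db.TEXT)
+    >>> schema = recordtype_to_json_schema(rt)
+    >>> schema["title"]
+    'Experiment'
+    >>> schema["required"]
+    ['date']
+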
+    """
+
+    if additional_options_for_text_props is None:
+        additional_options_for_text_props = {}
+
+    schema = {
+        "$schema": "https://json-schema.org/draft/2019-09/schema",
+        "type": "object"
+    }
+    if rt.name:
+        schema["title"] = rt.name
+    if rt.description:
+        schema["description"] = rt.description
+
+    schema["required"] = _make_required_list(rt)
+    schema["additionalProperties"] = additional_properties
+
+    props = {}
+    if name_and_description_in_properties:
+        props["name"] = _make_text_property("The name of the Record to be created")
+        props["description"] = _make_text_property("The description of the Record to be created")
+
+    for prop in rt.properties:
+        if prop.name in props:
+            # Multi property
+            raise NotImplementedError(
+                "Creating a schema for multi-properties is not specified. "
+                f"Property {prop.name} occurs more than once."
+            )
+        props[prop.name] = _make_prop_from_prop(
+            prop, additional_options_for_text_props, units_in_description)
+
+    schema["properties"] = props
+    return schema
diff --git a/src/caosadvancedtools/models/parser.py b/src/caosadvancedtools/models/parser.py
index b354c42bc555a73e97f69889d4a64b6b50b56c83..25b5727c2674e0fbfa58f31a595da91aebfc806a 100644
--- a/src/caosadvancedtools/models/parser.py
+++ b/src/caosadvancedtools/models/parser.py
@@ -198,7 +198,7 @@ def parse_model_from_json_schema(
         ignored. Default is False.
 
     existing_model : dict, optional
-        An existing model to which the created model shall be added.
+        An existing model to which the created model shall be added.  Not implemented yet.
 
     Returns
     -------
@@ -213,6 +213,9 @@ def parse_model_from_json_schema(
     about the limitations of the current implementation.
 
     """
+    if existing_model is not None:
+        raise NotImplementedError("Adding to an existing model is not implemented yet.")
+
     # @author Florian Spreckelsen
     # @date 2022-02-17
     # @review Timm Fitschen 2023-05-25
diff --git a/unittests/test_cfood.py b/unittests/test_cfood.py
index 62e4b114a1c5cd8f3631f774637f3876c545afd3..e2f15ffdc7929fbd67aee37bccdb0f44cacef104 100644
--- a/unittests/test_cfood.py
+++ b/unittests/test_cfood.py
@@ -32,7 +32,7 @@ from caosadvancedtools.cfood import (AbstractCFood, AbstractFileCFood, CMeal,
                                      get_entity_for_path)
 from caosadvancedtools.crawler import FileCrawler
 from caosadvancedtools.example_cfood import ExampleCFood
-from caosdb.common.models import _parse_single_xml_element
+from linkahead.common.models import _parse_single_xml_element
 from lxml import etree
 from datetime import datetime, timezone
 
diff --git a/unittests/test_h5.py b/unittests/test_h5.py
index 360d4b28938492d0f2af6d696e39dffb1cc3fead..961dd4246ef4b02208226ada5d3e1389133ddbcc 100644
--- a/unittests/test_h5.py
+++ b/unittests/test_h5.py
@@ -1,8 +1,8 @@
 import unittest
 from tempfile import NamedTemporaryFile
 
-import caosdb as db
-import caosdb.apiutils
+import linkahead as db
+import linkahead.apiutils
 import h5py
 import numpy as np
 from caosadvancedtools.cfoods import h5
@@ -77,8 +77,8 @@ class H5CFoodTest(unittest.TestCase):
         # TODO this does probably break the code: The function will not be
         # restored correctly.
         # Change it to use the BaseMockUpTest
-        real_retrieve = caosdb.apiutils.retrieve_entity_with_id
-        caosdb.apiutils.retrieve_entity_with_id = dummy_get
+        real_retrieve = linkahead.apiutils.retrieve_entity_with_id
+        linkahead.apiutils.retrieve_entity_with_id = dummy_get
 
         # should run without problem
         h5.collect_existing_structure(db.Record(), db.Record(id=234), h5.EntityMapping())
@@ -151,7 +151,7 @@ class H5CFoodTest(unittest.TestCase):
         self.assertEqual(em.to_existing[r_child2._cuid], ENTS[101])
         self.assertEqual(em.to_target[101], r_child2)
 
-        caosdb.apiutils.retrieve_entity_with_id = real_retrieve
+        linkahead.apiutils.retrieve_entity_with_id = real_retrieve
 
     def test_h5_attr_to_property(self):
 
@@ -160,7 +160,8 @@ class H5CFoodTest(unittest.TestCase):
         test_float = np.float_(1.0)
         test_str = "Test"
         test_complex: complex = 2+3j
-        self.assertRaises(NotImplementedError, h5_attr_to_property, test_int)  # only numpy-integers processed?
+        self.assertRaises(NotImplementedError, h5_attr_to_property,
+                          test_int)  # only numpy-integers processed?
         self.assertTupleEqual((1, db.INTEGER), h5_attr_to_property(test_integer))
         self.assertTupleEqual((1.0, db.DOUBLE), h5_attr_to_property(test_float))
         self.assertTupleEqual(("Test", db.TEXT), h5_attr_to_property(test_str))
@@ -187,4 +188,5 @@ class H5CFoodTest(unittest.TestCase):
         # Test scalar values given as np.array
         self.assertTupleEqual((1, db.INTEGER), h5_attr_to_property(np.array(1)))
         self.assertTupleEqual((1.123, db.DOUBLE), h5_attr_to_property(np.array(1.123)))
-        self.assertTupleEqual(('Hello World', db.TEXT), h5_attr_to_property(np.array("Hello World")))
+        self.assertTupleEqual(('Hello World', db.TEXT),
+                              h5_attr_to_property(np.array("Hello World")))
diff --git a/unittests/test_json_schema_exporter.py b/unittests/test_json_schema_exporter.py
new file mode 100644
index 0000000000000000000000000000000000000000..b1a51f1aee87d500dab6536b0df08f1535226dbf
--- /dev/null
+++ b/unittests/test_json_schema_exporter.py
@@ -0,0 +1,307 @@
+#!/usr/bin/env python
+# encoding: utf-8
+#
+# This file is a part of the CaosDB Project.
+#
+# Copyright (C) 2023 Indiscale GmbH <info@indiscale.com>
+# Copyright (C) 2023 Florian Spreckelsen <f.spreckelsen@indiscale.com>
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Affero General Public License as published by the Free
+# Software Foundation, either version 3 of the License, or (at your option) any
+# later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Affero General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+
+import linkahead as db
+
+from jsonschema import FormatChecker, validate, ValidationError
+from pytest import raises
+
+from caosadvancedtools.json_schema_exporter import recordtype_to_json_schema as rtjs
+
+
+def test_empty_rt():
+
+    rt = db.RecordType(name="Test", description="descr")
+
+    schema = rtjs(rt)
+
+    assert schema["title"] == rt.name
+    assert schema["description"] == rt.description
+    assert len(schema["properties"]) == 0
+    assert len(schema["required"]) == 0
+    assert schema["additionalProperties"] is True
+
+    schema = rtjs(rt, additional_properties=False)
+
+    assert schema["title"] == rt.name
+    assert schema["description"] == rt.description
+    assert len(schema["properties"]) == 0
+    assert len(schema["required"]) == 0
+    assert schema["additionalProperties"] is False
+
+    schema = rtjs(rt, name_and_description_in_properties=True)
+
+    assert len(schema["properties"]) == 2
+    assert "name" in schema["properties"]
+    assert "description" in schema["properties"]
+    assert schema["properties"]["name"]["type"] == "string"
+    assert schema["properties"]["description"]["type"] == "string"
+
+
+def test_rt_with_scalar_props():
+
+    rt = db.RecordType(name="Test")
+    rt.add_property(name="SimpleText", datatype=db.TEXT, description="This is a simple text")
+    rt.add_property(name="ObligatoryDatetime", datatype=db.DATETIME, importance=db.OBLIGATORY)
+    rt.add_property(name="JustDateNoTime", datatype=db.DATETIME, description="Only dates, no times")
+    rt.add_property(name="ObligatoryInteger", datatype=db.INTEGER, importance=db.OBLIGATORY)
+    rt.add_property(name="Double", datatype=db.DOUBLE)
+    # Suggested shouldn't influence the result in any way.
+    rt.add_property(name="Boolean", datatype=db.BOOLEAN, importance=db.SUGGESTED)
+
+    schema = rtjs(rt, additional_options_for_text_props={"JustDateNoTime": {"format": "date"}})
+
+    assert "properties" in schema
+    props = schema["properties"]
+    assert len(props) == 6
+    assert "required" in schema
+    assert len(schema["required"]) == 2
+    assert "ObligatoryDatetime" in schema["required"]
+    assert "ObligatoryInteger" in schema["required"]
+
+    assert "SimpleText" in props
+    assert props["SimpleText"]["type"] == "string"
+    assert "format" not in props["SimpleText"]
+    assert "description" in props["SimpleText"]
+    assert props["SimpleText"]["description"] == "This is a simple text"
+
+    assert "ObligatoryDatetime" in props
+    assert "type" not in props["ObligatoryDatetime"]
+    assert "anyOf" in props["ObligatoryDatetime"]
+    assert len(props["ObligatoryDatetime"]["anyOf"]) == 2
+    date_found = 0
+    datetime_found = 0
+    for option in props["ObligatoryDatetime"]["anyOf"]:
+        assert option["type"] == "string"
+        fmt = option["format"]
+        if fmt == "date":
+            date_found += 1
+        if fmt == "date-time":
+            datetime_found += 1
+    assert date_found == 1
+    assert datetime_found == 1
+
+    assert "JustDateNoTime" in props
+    assert props["JustDateNoTime"]["type"] == "string"
+    assert "anyOf" not in props["JustDateNoTime"]
+    assert "pattern" not in props["JustDateNoTime"]
+    assert props["JustDateNoTime"]["format"] == "date"
+    assert props["JustDateNoTime"]["description"] == "Only dates, no times"
+
+    assert "ObligatoryInteger" in props
+    assert props["ObligatoryInteger"]["type"] == "integer"
+
+    assert "Double" in props
+    assert props["Double"]["type"] == "number"
+
+    assert "Boolean" in props
+    assert props["Boolean"]["type"] == "boolean"
+
+    # Test validation (we trust the jsonschema.validate function, so only test
+    # some more or less tricky cases with format or required).
+    example = {
+        "SimpleText": "something",
+        "ObligatoryInteger": 23,
+        "ObligatoryDatetime": "1900-01-01T12:34:56.0Z",
+        "JustDateNoTime": "2023-10-13"
+    }
+
+    # We need to explicitly enable the FormatChecker, otherwise format will be
+    # ignored
+    # (https://python-jsonschema.readthedocs.io/en/latest/validate/#validating-formats)
+    validate(example, schema, format_checker=FormatChecker())
+
+    example = {
+        "SimpleText": "something",
+        "ObligatoryInteger": 23,
+        "ObligatoryDatetime": "1900-01-01",
+        "JustDateNoTime": "2023-10-13"
+    }
+    validate(example, schema, format_checker=FormatChecker())
+
+    example = {
+        "SimpleText": "something",
+        "ObligatoryDatetime": "1900-01-01T12:34:56.0Z",
+        "JustDateNoTime": "2023-10-13"
+    }
+
+    with raises(ValidationError):
+        # required missing
+        validate(example, schema, format_checker=FormatChecker())
+
+    example = {
+        "SimpleText": "something",
+        "ObligatoryInteger": 23,
+        "ObligatoryDatetime": "1900-01-01T12:34:56.0Z",
+        "JustDateNoTime": "2023-10-13T23:59:59.123Z"
+    }
+
+    with raises(ValidationError):
+        # date expected in JustDateNoTime, but datetime given
+        validate(example, schema, format_checker=FormatChecker())
+
+
+def test_units():
+
+    rt = db.RecordType()
+    rt.add_property(name="ScalarWithUnit", datatype=db.DOUBLE, unit="m")
+    rt.add_property(name="ListWithUnit", description="This is a list.",
+                    datatype=db.LIST(db.DOUBLE), unit="m")
+
+    schema = rtjs(rt, units_in_description=True)
+
+    props = schema["properties"]
+    assert "ScalarWithUnit" in props
+    assert props["ScalarWithUnit"]["type"] == "number"
+    assert "description" in props["ScalarWithUnit"]
+    assert props["ScalarWithUnit"]["description"] == "Unit is m."
+    assert "unit" not in props["ScalarWithUnit"]
+
+    assert "ListWithUnit" in props
+    assert props["ListWithUnit"]["type"] == "array"
+    assert "items" in props["ListWithUnit"]
+    assert props["ListWithUnit"]["items"]["type"] == "number"
+    assert "description" in props["ListWithUnit"]
+    assert props["ListWithUnit"]["description"] == "This is a list. Unit is m."
+    assert "unit" not in props["ListWithUnit"]
+
+    schema = rtjs(rt, units_in_description=False)
+
+    props = schema["properties"]
+    assert "ScalarWithUnit" in props
+    assert props["ScalarWithUnit"]["type"] == "number"
+    assert "description" not in props["ScalarWithUnit"]
+    assert "unit" in props["ScalarWithUnit"]
+    assert props["ScalarWithUnit"]["unit"] == "m"
+
+    assert "ListWithUnit" in props
+    assert props["ListWithUnit"]["type"] == "array"
+    assert "items" in props["ListWithUnit"]
+    assert props["ListWithUnit"]["items"]["type"] == "number"
+    assert "description" in props["ListWithUnit"]
+    assert props["ListWithUnit"]["description"] == "This is a list."
+    assert "unit" in props["ListWithUnit"]
+    assert props["ListWithUnit"]["unit"] == "m"
+
+
+def test_rt_with_list_props():
+
+    rt = db.RecordType()
+    rt.add_property(name="ListOfIntegers", datatype=db.LIST(
+        db.INTEGER), description="List of integers")
+    rt.add_property(name="ListOfPatterns", datatype=db.LIST(db.TEXT))
+
+    schema = rtjs(rt, additional_options_for_text_props={"ListOfPatterns": {"pattern": "[A-Z]+"}})
+
+    props = schema["properties"]
+
+    assert "ListOfIntegers" in props
+    assert props["ListOfIntegers"]["type"] == "array"
+    assert "items" in props["ListOfIntegers"]
+    assert props["ListOfIntegers"]["items"]["type"] == "integer"
+    assert "description" not in props["ListOfIntegers"]["items"]
+    assert props["ListOfIntegers"]["description"] == "List of integers"
+
+    assert "ListOfPatterns" in props
+    assert props["ListOfPatterns"]["type"] == "array"
+    assert "items" in props["ListOfPatterns"]
+    assert props["ListOfPatterns"]["items"]["type"] == "string"
+    assert props["ListOfPatterns"]["items"]["pattern"] == "[A-Z]+"
+
+    # Validation
+    example = {
+        "ListOfIntegers": [1, 2, 3],
+        "ListOfPatterns": ["A", "BB", "CCC"]
+    }
+    validate(example, schema, format_checker=FormatChecker())
+
+    example = {
+        "ListOfIntegers": 1,
+        "ListOfPatterns": ["A", "BB", "CCC"]
+    }
+    with raises(ValidationError):
+        # No list
+        validate(example, schema, format_checker=FormatChecker())
+
+    example = {
+        "ListOfIntegers": [1, 2, 3],
+        "ListOfPatterns": ["A", "bb", "CCC"]
+    }
+    with raises(ValidationError):
+        # Pattern doesn't match
+        validate(example, schema, format_checker=FormatChecker())
+
+
+def test_rt_with_references():
+    """References and lists of references will come later, so test if the errors
+    are thrown correctly.
+
+    """
+
+    rt = db.RecordType()
+    rt.add_property(name="RefProp", datatype=db.REFERENCE)
+
+    with raises(NotImplementedError):
+
+        rtjs(rt)
+
+    rt = db.RecordType()
+    rt.add_property(name="RefProp", datatype="OtherType")
+
+    with raises(NotImplementedError):
+
+        rtjs(rt)
+
+    rt = db.RecordType()
+    rt.add_property(name="RefProp", datatype=db.LIST(db.REFERENCE))
+
+    with raises(NotImplementedError):
+
+        rtjs(rt)
+
+    rt = db.RecordType()
+    rt.add_property(name="RefProp", datatype=db.LIST("OtherType"))
+
+    with raises(NotImplementedError):
+
+        rtjs(rt)
+
+
+def test_broken():
+
+    rt = db.RecordType()
+    rt.add_property(name="something", datatype=None)
+
+    with raises(ValueError) as ve:
+        rtjs(rt)
+    assert str(ve.value).startswith("Unknown or no property datatype.")
+
+    rt = db.RecordType()
+    rt.add_property(name="MultiProp", datatype=db.INTEGER)
+    rt.add_property(name="MultiProp", datatype=db.INTEGER)
+
+    with raises(NotImplementedError) as nie:
+        rtjs(rt)
+    assert "MultiProp" in str(nie.value)
+    assert str(nie.value).startswith("Creating a schema for multi-properties is not specified.")
diff --git a/utils/branch_exists.py b/utils/branch_exists.py
new file mode 100755
index 0000000000000000000000000000000000000000..9626e4aa81e4ee2bd9a239f6a0650dc4e383593f
--- /dev/null
+++ b/utils/branch_exists.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python3
+"""
+Exit with error code 2 if the branch does not exist.
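+
+Usage (the branch name is illustrative; the repository must be one of the keys
+known to ref_to_commit.py):
+
+    branch_exists.py ADVANCEDUSERTOOLS f-my-feature
+
+Prints a message and exits with code 0 if the branch exists, 2 otherwise.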
+"""
+import sys
+import argparse
+import requests
+from ref_to_commit import get_remote
+
+
+def branch_exists(repository, branch):
+    remote = get_remote(repository)
+    resp = requests.get(remote+"/repository/branches/"+branch).json()
+    return "message" not in resp
+
+
+def define_parser():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("repository")
+    parser.add_argument("branchname")
+
+    return parser
+
+
+if __name__ == "__main__":
+    parser = define_parser()
+    args = parser.parse_args()
+    ret = branch_exists(repository=args.repository, branch=args.branchname)
+    if ret is False:
+        print("branch does not exist.")
+        sys.exit(2)
+    else:
+        print("branch exists.")
diff --git a/utils/ref_to_commit.py b/utils/ref_to_commit.py
new file mode 100755
index 0000000000000000000000000000000000000000..93f15f31b6158172cfca5a5095b13f6a4fcb22ab
--- /dev/null
+++ b/utils/ref_to_commit.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+"""
+Replace a git branch name with its newest commit hash, using the GitLab API.
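+
+Usage (the branch name is illustrative; the repository must be one of the keys
+in _REPOS below):
+
+    ref_to_commit.py ADVANCEDUSERTOOLS f-my-feature
+
+Prints the short commit hash of the branch if it exists; otherwise the given
+reference is printed back unchanged.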
+"""
+import argparse
+
+import requests
+
+
+_REPOS = {
+    "SERVER": "https://gitlab.indiscale.com/api/v4/projects/100",
+    "WEBUI": "https://gitlab.indiscale.com/api/v4/projects/98",
+    "PYLIB": "https://gitlab.indiscale.com/api/v4/projects/97",
+    "MYSQLBACKEND": "https://gitlab.indiscale.com/api/v4/projects/101",
+    "PYINT": "https://gitlab.indiscale.com/api/v4/projects/99",
+    "CPPLIB": "https://gitlab.indiscale.com/api/v4/projects/107",
+    "CPPINT": "https://gitlab.indiscale.com/api/v4/projects/111",
+    "ADVANCEDUSERTOOLS": "https://gitlab.indiscale.com/api/v4/projects/104"
+}
+
+
+def get_remote(repository):
+    return _REPOS[repository]
+
+
+def ref_to_commit(repository, reference):
+    remote = get_remote(repository)
+    r = requests.get(remote+"/repository/branches/"+reference).json()
+
+    if "name" in r:
+        return r["commit"]["short_id"]
+
+    return reference
+
+
+def define_parser():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("repository")
+    parser.add_argument("reference")
+
+    return parser
+
+
+if __name__ == "__main__":
+    parser = define_parser()
+    args = parser.parse_args()
+    ret = ref_to_commit(repository=args.repository, reference=args.reference)
+    print(ret)