diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 0f9a258de99ba559d280fc5ace74a3f111a9e30e..a773c6776b5224dc482bc104fe490e98b9e19eb5 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -57,22 +57,10 @@ mypy:
   tags: [ docker ]
   stage: linting
   script:
-    - pip install mypy types-PyYAML types-jsonschema types-requests types-setuptools types-lxml types-python-dateutil pytest
+    - pip install .[mypy,test]
     - make mypy
-  allow_failure: true
 
 # run unit tests
-unittest_py3.8:
-  tags: [ docker ]
-  stage: test
-  needs: [ ]
-  image: python:3.8
-  script: &python_test_script
-    # Python docker has problems with tox and pip so use plain pytest here
-    - touch ~/.pylinkahead.ini
-    - pip install pynose pytest pytest-cov jsonschema>=4.4.0 setuptools
-    - pip install .
-    - python -m pytest unittests
 
 # This needs to be changed once Python 3.9 isn't the standard Python in Debian
 # anymore.
@@ -92,7 +80,11 @@ unittest_py3.10:
   stage: test
   needs: [ ]
   image: python:3.10
-  script: *python_test_script
+  script: &python_test_script
+    # Python docker has problems with tox and pip so use plain pytest here
+    - touch ~/.pylinkahead.ini
+    - pip install .[test]
+    - python -m pytest unittests
 
 unittest_py3.11:
   tags: [ docker ]
@@ -109,19 +101,25 @@ unittest_py3.12:
   script: *python_test_script
 
 unittest_py3.13:
-  allow_failure: true
   tags: [ docker ]
   stage: test
   needs: [ ]
-  image: python:3.13-rc
-  script:
-    # TODO: Replace by '*python_test_script' as soon as 3.13 has been officially released.
-    # Python docker has problems with tox and pip so use plain pytest here
-    - apt update && apt install -y cargo
-    - touch ~/.pylinkahead.ini
-    - pip install pynose pytest pytest-cov jsonschema>=4.4.0 setuptools
-    - pip install .
-    - python -m pytest unittests
+  image: python:3.13
+  script: *python_test_script
+
+unittest_py3.14:
+  allow_failure: true   # remove on release
+  tags: [ docker ]
+  stage: test
+  needs: [ ]
+  image: python:3.14-rc
+  script:               # replace by '*python_test_script' on release
+    # Install cargo manually, source its env, and tell PyO3 to accept 3.14 as interpreter
+    - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
+    - . "$HOME/.cargo/env"
+    - export PYO3_USE_ABI3_FORWARD_COMPATIBILITY=1
+    # Continue normally
+    - *python_test_script
 
 # Trigger building of server image and integration tests
 trigger_build:
@@ -167,7 +165,7 @@ build-testenv:
 pages_prepare: &pages_prepare
   tags: [ cached-dind ]
   stage: deploy
-  needs: [ code_style, pylint, unittest_py3.8, unittest_py3.9, unittest_py3.10 ]
+  needs: [ code_style, pylint, unittest_py3.9, unittest_py3.10 ]
   only:
     refs:
       - /^release-.*$/i
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e130b1cdb86688554c53656c5272db3c9dc92d1d..20f2498a09e5599933c18606febf8f160594a3c8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,115 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased] ##
 
+### Added ###
+- convenience functions `value_matches_versionid`, `get_id_from_versionid` and `get_versionid`
+
+### Changed ###
+
+### Deprecated ###
+
+### Removed ###
+
+### Fixed ###
+
+### Security ###
+
+### Documentation ###
+
+* [#78](https://gitlab.com/linkahead/linkahead-pylib/-/issues/78) Fix
+  and extend test-registration docstrings.
+
+## [0.17.0] - 2025-01-14 ##
+
+### Added ###
+
+* New setup extra `test` which installs the dependencies for testing.
+* The Container class has a new member function `filter_by_identity`
+  which is based on `_filter_entity_list`.
+* The `Entity` properties `_cuid` and `_flags` are now available for read-only access
+  as `cuid` and `flags`, respectively.
+
+### Changed ###
+
+* Renamed the `filter` function of Container, ParentList and
+  PropertyList to `filter_by_identity`.
+
+### Deprecated ###
+
+* `ParentList.filter` and `PropertyList.filter` functions, use
+  `filter_by_identity` instead.
+
+### Removed ###
+
+* Support for Python 3.8
+
+### Fixed ###
+
+* [#73](https://gitlab.com/linkahead/linkahead-pylib/-/issues/73)
+  `Entity.to_xml` now detects potentially infinite recursion and prevents an error
+* [#89](https://gitlab.com/linkahead/linkahead-pylib/-/issues/89)
+  `to_xml` does not add `noscript` or `TransactionBenchmark` tags anymore
+* [#103](https://gitlab.com/linkahead/linkahead-pylib/-/issues/103)
+  `authentication/interface/on_response()` does not overwrite
+  `auth_token` if new value is `None`
+* [#119](https://gitlab.com/linkahead/linkahead-pylib/-/issues/119)
+  The diff returned by compare_entities now uses id instead of name as
+  key if either property does not have a name
+* [#87](https://gitlab.com/linkahead/linkahead-pylib/-/issues/87)
+  `XMLSyntaxError` messages when parsing (incomplete) responses in
+  case of certain connection timeouts.
+* [#127](https://gitlab.com/linkahead/linkahead-pylib/-/issues/127)
+  pylinkahead.ini now supports None and tuples as values for the `timeout` keyword
+
+## [0.16.0] - 2024-11-13 ##
+
+### Added ###
+
+* `ParentList` and `PropertyList` now have a `filter` function that allows selecting a subset of
+  the contained elements by ID and/or name.
+* Official support for Python 3.13
+* Added arguments to `describe_diff` that allow customizing the labels for the 'old' and the 'new' diffs.
+* Optional `realm` argument for `linkahead_admin.py set_user_password`
+  which defaults to `None`, i.e., the server's default realm.
+
+### Changed ###
+
+* `compare_entities` is now case insensitive with respect to property and
+  recordtype names
+* `_ParentList` is now called `ParentList`
+* `_Properties` is now called `PropertyList`
+* `ParentList.remove` is now case insensitive when a name is used.
+
+### Deprecated ###
+
+* the use of the arguments `old_entity` and `new_entity` in `compare_entities`
+  is now deprecated. Please use `entity0` and `entity1` respectively instead.
+
+### Fixed ###
+
+* [gitlab.indiscale.com#200](https://gitlab.indiscale.com/caosdb/src/caosdb-pylib/-/issues/200)
+  ``linkahead_admin.py`` prints reasonable error messages when users
+  or roles don't exist.
+
+## [0.15.1] - 2024-08-21 ##
+
+### Deprecated ###
+
+* `connection.get_username`. Use `la.Info().user_info.name` instead.
+
+### Fixed ###
+
+* [#128](https://gitlab.com/linkahead/linkahead-pylib/-/issues/128)
+  Assign `datetime.date` or `datetime.datetime` values to `DATETIME`
+  properties.
+
+### Documentation ###
+
+* Added docstrings for `linkahead.models.Info` and `linkahead.models.UserInfo`.
+
+## [0.15.0] - 2024-07-09 ##
+
 ### Added ###
 
 * Support for Python 3.12
@@ -19,7 +128,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ### Changed ###
 
-### Deprecated ###
+* The environment variable `PYLINKAHEADINI` is now used instead of `PYCAOSDBINI`.
 
 ### Removed ###
 
@@ -35,10 +144,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 * [#120](https://gitlab.com/linkahead/linkahead-pylib/-/issues/120) Unwanted
   subproperties in reference properties.
 
-### Security ###
-
 ### Documentation ###
 
+* Added documentation and a tutorial example for the usage of the `page_length`
+  argument of `execute_query`.
+
 ## [0.14.0] - 2024-02-20
 
 ### Added ###
diff --git a/CITATION.cff b/CITATION.cff
index cbcb570b27b7cd71f50645614222302bccc34805..bcecc2fdd962f4c581a2d53d5c1a324fb643a4a3 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -20,6 +20,6 @@ authors:
     given-names: Stefan
     orcid: https://orcid.org/0000-0001-7214-8125
 title: CaosDB - Pylib
-version: 0.14.0
+version: 0.17.0
 doi: 10.3390/data4020083
-date-released: 2024-02-20
+date-released: 2025-01-14
diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index e2326b831a71751265c6c2d5a333ccc37145bfa5..e9bd54a1459df22afa307e256625d05e74bdc6a8 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -1,5 +1,5 @@
 * caosdb-server >= 0.12.0
-* Python >= 3.8
+* Python >= 3.9
 * pip >= 20.0.2
 
 Any other dependencies are defined in the setup.py and are being installed via pip
diff --git a/Makefile b/Makefile
index 21ea40ac8a6eb34032aba75c089e278fa354a6f5..7490c5d586c18d9da361f8940a39550779fb5938 100644
--- a/Makefile
+++ b/Makefile
@@ -40,7 +40,7 @@ style:
 .PHONY: style
 
 lint:
-	pylint --unsafe-load-any-extension=y -d all -e E,F src/linkahead/common
+	pylint --unsafe-load-any-extension=y -d all -e E,F src/linkahead
 .PHONY: lint
 
 mypy:
diff --git a/README.md b/README.md
index 193cb8f0cff8ff5cee36a40a78e53f070527e2e0..d630e879e9ff4781bb79b193d0240ef11ae211d2 100644
--- a/README.md
+++ b/README.md
@@ -47,7 +47,7 @@ However, you can also create an issue for it.
 
 * Copyright (C) 2018 Research Group Biomedical Physics, Max Planck Institute
   for Dynamics and Self-Organization Göttingen.
-* Copyright (C) 2020-2022 Indiscale GmbH <info@indiscale.com>
+* Copyright (C) 2020-2025 Indiscale GmbH <info@indiscale.com>
 
 All files in this repository are licensed under a [GNU Affero General Public
 License](LICENCE.md) (version 3 or later).
diff --git a/README_SETUP.md b/README_SETUP.md
index b05eff87711b84682aa82bbd0aafd61f2e8c86eb..f4c921382edb26776391590298faed06a5391396 100644
--- a/README_SETUP.md
+++ b/README_SETUP.md
@@ -2,24 +2,44 @@
 
 ## Installation ##
 
-### Requirements ###
+### How to install ###
 
-PyCaosDB needs at least Python 3.8.  Additionally, the following packages are required (they will
-typically be installed automatically):
+First ensure that Python (at least version 3.9) is installed. If this is not
+the case, you can use the [Installing python](#installing-python-) guide for your OS.
 
-- `lxml`
-- `PyYaml`
-- `PySocks`
+#### Generic installation ####
 
-Optional packages:
-- `keyring`
-- `jsonschema`
+To install this LinkAhead Python client locally, use `pip`/`pip3`:
 
-### How to install ###
+```sh
+pip install linkahead
+```
+
+#### Additional dependencies ####
+
+To test using tox, you also need to install tox: `pip install tox`.
+
+To install dependencies used by optional functionality, the following pip extras
+keywords are defined:
+- `test` for testing with pytest
+- `mypy` for mypy and the corresponding type stub packages
+- `jsonschema`
+- `keyring`
+
+These extras can be installed using: 
+```sh
+pip install .[KEYWORD]
+```
+A current list of the dependencies installed with this program, as well as those installed with
+the extras keywords, can be found in the `metadata` dictionary in `setup.py`'s `setup_package()`
+method, under the entries `install_requires` and `extras_require`.
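+
+For example, to install the `test` and `mypy` dependencies together from a local
+checkout of this repository:
+```sh
+pip install .[test,mypy]
+```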
+
+### Installing python ###
 
 #### Linux ####
 
-Make sure that Python (at least version 3.8) and pip is installed, using your system tools and
+Make sure that Python (at least version 3.9) and pip are installed, using your system tools and
 documentation.
 
 Then open a terminal and continue in the [Generic installation](#generic-installation) section.
@@ -51,34 +71,7 @@ cd /Applications/Python\ 3.9/
 sudo ./Install\ Certificates.command
 ```
 
-After these steps, you may continue with the [Generic
-installation](#generic-installation).
-
-#### Generic installation ####
-
-To install PyCaosDB locally, use `pip3` (also called `pip` on some systems):
-
-```sh
-pip3 install --user caosdb
-```
-
----
-
-Alternatively, obtain the sources from GitLab and install from there (`git` must be installed for
-this option):
-
-```sh
-git clone https://gitlab.com/caosdb/caosdb-pylib
-cd caosdb-pylib
-pip3 install --user .
-```
-
-For installation of optional packages, install with an additional option, e.g. for 
-validating with the caosdb json schema:
-
-```sh
-pip3 install --user .[jsonschema]
-```
+After these steps, you may continue with the [Generic installation](#generic-installation) section.
 
 ## Configuration ##
 
@@ -87,7 +80,7 @@ is described in detail in the [configuration section of the documentation](https
 
 ## Try it out ##
 
-Start Python and check whether the you can access the database. (You will be asked for the
+Start Python and check whether you can access the database. (You will be asked for the
 password):
 
 ```python
@@ -107,6 +100,7 @@ Now would be a good time to continue with the [tutorials](tutorials/index).
 - Run all tests: `tox` or `make unittest`
 - Run a specific test file: e.g. `tox -- unittests/test_schema.py`
 - Run a specific test function: e.g. `tox -- unittests/test_schema.py::test_config_files`
+- To run using pytest: `pytest .`
 
 ## Documentation ##
 We use sphinx to create the documentation. Docstrings in the code should comply
@@ -114,12 +108,6 @@ with the Googly style (see link below).
 
 Build documentation in `build/` with `make doc`.
 
-### Requirements ###
-
-- `sphinx`
-- `sphinx-autoapi`
-- `recommonmark`
-
 ### How to contribute ###
 
 - [Google Style Python Docstrings](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html)
@@ -127,7 +115,7 @@ Build documentation in `build/` with `make doc`.
 - [References to other documentation](https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#role-external)
 
 ### Troubleshooting ###
-If the client is to be executed directly from the `/src` folder, an initial `.\setup.py install --user` must be called.
+If the client is to be executed directly from the `/src` folder, an initial `.\setup.py install` must be called.
 
 ## Migration ##
 TODO
diff --git a/examples/pylinkahead.ini b/examples/pylinkahead.ini
index f37e24e0e5b754ec58a07b034ba2755096f0b441..84d1eb8526201c817d6614e7eb74f35a932c5d78 100644
--- a/examples/pylinkahead.ini
+++ b/examples/pylinkahead.ini
@@ -1,7 +1,7 @@
 # To be found by the caosdb package, the INI file must be located either in
 # - $CWD/pylinkahead.ini
 # - $HOME/.pylinkahead.ini
-# - the location given in the env variable PYCAOSDBINI
+# - the location given in the env variable PYLINKAHEADINI
 
 [Connection]
 # URL of the CaosDB server
diff --git a/examples/set_permissions.py b/examples/set_permissions.py
index a558bde73897cb6827c93373cc8327efc10e6e15..4657f2cca182b567c761a777df838825f8e89aef 100755
--- a/examples/set_permissions.py
+++ b/examples/set_permissions.py
@@ -37,13 +37,13 @@ from caosdb import administration as admin
 def assert_user_and_role():
     """Make sure that users and roles exist.
 
-After calling this function, there will be a user "jane" with the role "human"
-and the user "xaxys" with the role "alien".  These users and roles are returned.
+    After calling this function, there will be a user "jane" with the role "human"
+    and the user "xaxys" with the role "alien".  These users and roles are returned.
 
-Returns
--------
-out : tuple
-  ((human_user, human_role), (alien_user, alien_role))
+    Returns
+    -------
+    out : tuple
+      ((human_user, human_role), (alien_user, alien_role))
 
     """
     try:
@@ -81,15 +81,15 @@ out : tuple
 def get_entities(count=1):
     """Retrieve one or more entities.
 
-Parameters
-----------
-count : int, optional
-    How many entities to retrieve.
+    Parameters
+    ----------
+    count : int, optional
+        How many entities to retrieve.
 
-Returns
--------
-out : Container
-    A container of retrieved entities, the length is given by the parameter count.
+    Returns
+    -------
+    out : Container
+        A container of retrieved entities, the length is given by the parameter count.
     """
     cont = db.execute_query("FIND RECORD 'Human Food'", flags={
                             "P": "0L{n}".format(n=count)})
@@ -102,20 +102,20 @@ out : Container
 def set_permission(role_grant, role_deny, cont=None, general=False):
     """Set the permissions of some entities.
 
-Parameters
-----------
-role_grant : str
-    Role which is granted permissions.
+    Parameters
+    ----------
+    role_grant : str
+        Role which is granted permissions.
 
-role_deny : str
-    Role which is denied permissions.
+    role_deny : str
+        Role which is denied permissions.
 
-cont : Container
-    Entities for which permissions are set.
+    cont : Container
+        Entities for which permissions are set.
 
-general : bool, optional
-    If True, the permissions for the roles will be set.  If False (the default),
-    permissions for the entities in the container will be set.
+    general : bool, optional
+        If True, the permissions for the roles will be set.  If False (the default),
+        permissions for the entities in the container will be set.
     """
 
     # Set general permissions
@@ -143,23 +143,23 @@ general : bool, optional
 def test_permission(granted_user, denied_user, cont):
     """Tests if the permissions are set correctly for two users.
 
-Parameters
-----------
-granted_user : (str, str)
-    The user which should have permissions to retrieve the entities in `cont`.
-    Given as (user, password).
+    Parameters
+    ----------
+    granted_user : (str, str)
+        The user which should have permissions to retrieve the entities in `cont`.
+        Given as (user, password).
 
-denied_user : (str, str)
-    The user which should have no permission to retrieve the entities in `cont`.
-    Given as (user, password).
+    denied_user : (str, str)
+        The user which should have no permission to retrieve the entities in `cont`.
+        Given as (user, password).
 
-cont :  Container
-    Entities for which permissions are tested.
+    cont :  Container
+        Entities for which permissions are tested.
 
 
-Returns
--------
-None
+    Returns
+    -------
+    None
 
     """
 
diff --git a/setup.py b/setup.py
index ee2a5fb6fd7212acfc9ce9bc732fc9f2d4f345b4..ab8555b858b0004b59e979ac2de7b2b8450073b1 100755
--- a/setup.py
+++ b/setup.py
@@ -48,7 +48,7 @@ from setuptools import find_packages, setup
 
 ISRELEASED = False
 MAJOR = 0
-MINOR = 14
+MINOR = 17
 MICRO = 1
 # Do not tag as pre-release until this commit
 # https://github.com/pypa/packaging/pull/515
@@ -179,7 +179,7 @@ def setup_package():
             "Topic :: Scientific/Engineering :: Information Analysis",
         ],
         packages=find_packages('src'),
-        python_requires='>=3.8',
+        python_requires='>=3.9',
         package_dir={'': 'src'},
         install_requires=['lxml>=4.6.3',
                           "requests[socks]>=2.26",
@@ -187,11 +187,27 @@ def setup_package():
                           'PyYAML>=5.4.1',
                           'future',
                           ],
-        extras_require={'keyring': ['keyring>=13.0.0'],
-                        'jsonschema': ['jsonschema>=4.4.0']},
+        extras_require={
+            "jsonschema": ["jsonschema>=4.4.0"],
+            "keyring": ["keyring>=13.0.0"],
+            "mypy": [
+                "mypy",
+                "types-PyYAML",
+                "types-jsonschema",
+                "types-requests",
+                "types-setuptools",
+                "types-lxml",
+                "types-python-dateutil",
+            ],
+            "test": [
+                "pytest",
+                "pytest-cov",
+                "coverage>=4.4.2",
+                "jsonschema>=4.4.0",
+            ]
+
+        },
         setup_requires=["pytest-runner>=2.0,<3dev"],
-        tests_require=["pytest", "pytest-cov", "coverage>=4.4.2",
-                       "jsonschema>=4.4.0"],
         package_data={
             'linkahead': ['py.typed', 'cert/indiscale.ca.crt', 'schema-pycaosdb-ini.yml'],
         },
diff --git a/src/doc/conf.py b/src/doc/conf.py
index 61a60d7c9e8d5c6b0959f4bba230cd483c06bc79..ce1aa8ff92d18d1ccd0bd408b1ce9e3523d03940 100644
--- a/src/doc/conf.py
+++ b/src/doc/conf.py
@@ -25,14 +25,14 @@ import sphinx_rtd_theme  # noqa: E402
 # -- Project information -----------------------------------------------------
 
 project = 'pylinkahead'
-copyright = '2023, IndiScale GmbH'
+copyright = '2024, IndiScale GmbH'
 author = 'Daniel Hornung'
 
 # The short X.Y version
-version = '0.14.1'
+version = '0.17.1'
 # The full version, including alpha/beta/rc tags
 # release = '0.5.2-rc2'
-release = '0.14.1-dev'
+release = '0.17.1-dev'
 
 
 # -- General configuration ---------------------------------------------------
diff --git a/src/doc/configuration.md b/src/doc/configuration.md
index 54ae251b9db9ef000545e701406b979aa58043f8..427551db4e1e97d7ca5f9820df6d5916e3496020 100644
--- a/src/doc/configuration.md
+++ b/src/doc/configuration.md
@@ -1,6 +1,6 @@
 # Configuration of PyLinkAhead #
 The behavior of PyLinkAhead is defined via a configuration that is provided using configuration files.
-PyLinkAhead tries to read from the inifile specified in the environment variable `PYCAOSDBINI` or
+PyLinkAhead tries to read from the inifile specified in the environment variable `PYLINKAHEADINI` or
 alternatively in `~/.pylinkahead.ini` upon import.  After that, the ini file `pylinkahead.ini` in the
 current working directory will be read additionally, if it exists.
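+
+For example, in a POSIX shell you can point PyLinkAhead to a specific ini file like this:
+
+```sh
+export PYLINKAHEADINI=/path/to/pylinkahead.ini
+```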
 
diff --git a/src/doc/tutorials/complex_data_models.rst b/src/doc/tutorials/complex_data_models.rst
index 569acdae174a9df9d0d2b5eae9a0084d793cc90c..52757c320b42f18b4b24ab9b7575e7bd0becc252 100644
--- a/src/doc/tutorials/complex_data_models.rst
+++ b/src/doc/tutorials/complex_data_models.rst
@@ -51,18 +51,18 @@ Examples
    # Very complex part of the data model:
    # Case 1: File added to another file
    f2.add_property(p1, value=f1)  # this adds a file property with value first file
-		                  # to the second file
+                          # to the second file
 
    # Case 2: Property added to a property
    p2.add_property(p3, value=27)  # this adds an integer property with value 27 to the
-		                  # double property
+                          # double property
 
    # Case 3: Reference property added to a property
    # The property p2 now has two sub properties, one is pointing to
    # record p2 which itself has the property p2, therefore this can be
    # considered a loop in the data model.
    p2.add_property(p4, value=r2)  # this adds a reference property pointing to
-		                  # record 2 to the double property
+                          # record 2 to the double property
 
    # Insert a container containing all the newly created entities:
    c = db.Container().extend([rt1, rt2, r1, r2, f1, p1, p2, p3, f2, p4])
@@ -75,3 +75,70 @@ Examples
    b = input("Press any key to cleanup.")
    # cleanup everything after the user presses any button.
    c.delete()
+
+
+Finding parents and properties
+------------------------------
+To find a specific parent or property of an Entity, its
+ParentList or PropertyList can be filtered using names, ids, or
+entities. A short example:
+
+.. code-block:: python3
+
+   import linkahead as db
+
+   # Setup a record with six properties
+   r = db.Record()
+   p1_1 = db.Property(id=101, name="Property 1")
+   p1_2 = db.Property(name="Property 1")
+   p2_1 = db.Property(id=102, name="Property 2")
+   p2_2 = db.Property(id=102)
+   p2_3 = db.Property(id=102, name="Other Property")
+   p3 = db.Property(id=104, name="Other Property")
+   r.add_property(p1_1).add_property(p1_2).add_property(p2_1)
+   r.add_property(p2_2).add_property(p2_3).add_property(p3)
+   properties = r.properties
+
+   # As r only has one property with id 101, this returns a list containing only p1_1
+   properties.filter_by_identity(pid=101)
+   # Result: [p1_1]
+
+   # Filtering with name="Property 1" returns both p1_1 and p1_2, as they share their name
+   properties.filter_by_identity(name="Property 1")
+   # Result: [p1_1, p1_2]
+
+   #  If both name and pid are given, matching is based only on pid for all entities that have an id
+   properties.filter_by_identity(pid="102", name="Other Property")
+   # Result: [p2_1, p2_2, p2_3]
+
+   # However, filter_by_identity with name="Property 1" and pid="101" returns both p1_1 and p1_2, because
+   # p1_2 does not have an id and matches the name
+   properties.filter_by_identity(pid="101", name="Property 1")
+   # Result: [p1_1, p1_2]
+
+   # We can also filter using an entity, in which case the name and id of the entity are used:
+   properties.filter_by_identity(pid="102", name="Property 2") == properties.filter_by_identity(p2_1)
+   # Result: True
+
+   # If we only need properties that match both id and name, we can set the parameter
+   # conjunction to True:
+   properties.filter_by_identity(pid="102", name="Property 2", conjunction=True)
+   # Result: [p2_1]
+
+The `filter_by_identity` function of ParentList works analogously.
+
+Finding entities in a Container
+-------------------------------
+In the same way as described above, a Container can be filtered.
+A short example:
+
+.. code-block:: python3
+
+   import linkahead as db
+
+   # Set up a container with two properties
+   p1 = db.Property(id=101, name="Property 1")
+   p2 = db.Property(name="Property 2")
+   c = db.Container().extend([p1, p2])
+   c.filter_by_identity(name="Property 1")
+   # Result: [p1]
diff --git a/src/doc/tutorials/index.rst b/src/doc/tutorials/index.rst
index 706e26c2b1b4876c29d43c2bddd9a5fe357a003d..e745482ace189e10a042975869cae6310f6ad703 100644
--- a/src/doc/tutorials/index.rst
+++ b/src/doc/tutorials/index.rst
@@ -15,6 +15,7 @@ advanced usage of the Python client.
    Data-Insertion
    errors
    Entity-Getters
+   paginated_queries
    caching
    data-model-interface
    complex_data_models
diff --git a/src/doc/tutorials/paginated_queries.rst b/src/doc/tutorials/paginated_queries.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c250223f46405caa9d289ce4d8774daf06fdf366
--- /dev/null
+++ b/src/doc/tutorials/paginated_queries.rst
@@ -0,0 +1,63 @@
+Query pagination
+================
+
+When retrieving many entities, you may not want to retrieve all at once, e.g.,
+for performance reasons or to prevent connection timeouts, but rather in a
+chunked way. For that purpose, there is the ``page_length`` parameter in the
+:py:meth:`~linkahead.common.models.execute_query` function. If this is set to a
+non-zero integer, the behavior of the function changes in that it returns a
+Python `generator <https://docs.python.org/3/glossary.html#term-generator>`_
+which can be used, e.g., in loops or in list comprehensions. The generator yields
+a :py:class:`~linkahead.common.models.Container` containing the next
+``page_length`` entities from the query result.
+
+The following example illustrates this on the demo server.
+
+.. code-block:: python
+
+   import linkahead as db
+
+   # 10 at the time of writing of this example
+   print(db.execute_query("FIND MusicalInstrument"))
+
+   # Retrieve in pages of length 5 and iterate over the pages
+   for page in db.execute_query("FIND MusicalInstrument", page_length=5):
+       # each page is a container
+       print(type(page))
+       # exactly page_length=5 for the first N-1 pages,
+       # and possibly less for the last page
+       print(len(page))
+       # the items on each page are subclasses of Entity
+       print(type(page[0]))
+       # The id of the first entity on the page is different for all pages
+       print(page[0].id)
+
+   # You can use this in a list comprehension to fill a container
+   container_paginated = db.Container().extend(
+       [ent for page in db.execute_query("FIND MusicalInstrument", page_length=5) for ent in page]
+   )
+   # The result is the same as in the unpaginated case, but the
+   # following can cause connection timeouts in case of very large
+   # retrievals
+   container_at_once = db.execute_query("FIND MusicalInstrument")
+   for ent1, ent2 in zip(container_paginated, container_at_once):
+       print(ent1.id == ent2.id)  # always true
+
+As you can see, you can iterate over a paginated query and then access the
+entities on each page during the iteration.
+
+.. note::
+
+   The ``page_length`` keyword is ignored for ``COUNT`` queries, where
+   :py:meth:`~linkahead.common.models.execute_query` always returns the integer
+   result, and in the case of ``unique=True``, where exactly one
+   :py:class:`~linkahead.common.models.Entity` is always returned.
+
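+For example (using the same demo query as above), a ``COUNT`` query returns a
+plain integer even if ``page_length`` is given:
+
+.. code-block:: python
+
+   n = db.execute_query("COUNT MusicalInstrument", page_length=5)
+   print(type(n))  # <class 'int'>; page_length had no effect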
+
+.. warning::
+
+   Be careful when combining query pagination with insert, update, or delete
+   operations. If your database changes while iterating over a paginated query,
+   the client will raise a
+   :py:exc:`~linkahead.exceptions.PagingConsistencyError` since the server
+   can't guarantee that the query results haven't changed in the meantime.
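+
+If you want to keep a script robust against such changes, you can catch the
+exception and restart the paginated retrieval (a minimal sketch):
+
+.. code-block:: python
+
+   from linkahead.exceptions import PagingConsistencyError
+
+   while True:
+       try:
+           ids = [ent.id
+                  for page in db.execute_query("FIND MusicalInstrument", page_length=5)
+                  for ent in page]
+           break
+       except PagingConsistencyError:
+           # The data changed on the server while we were iterating; start over.
+           continue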
diff --git a/src/linkahead/__init__.py b/src/linkahead/__init__.py
index 3a8c5ba39c88deaa5dc945135e3828945fd39d58..ac8df123c66ee092801e63a427003fbb7478fc0c 100644
--- a/src/linkahead/__init__.py
+++ b/src/linkahead/__init__.py
@@ -24,7 +24,7 @@
 
 """LinkAhead Python bindings.
 
-Tries to read from the inifile specified in the environment variable `PYCAOSDBINI` or
+Tries to read from the inifile specified in the environment variable `PYLINKAHEADINI` or
 alternatively in `~/.pylinkahead.ini` upon import.  After that, the ini file `pylinkahead.ini` in
 the current working directory will be read additionally, if it exists.
 
@@ -42,7 +42,7 @@ from .common.datatype import (BOOLEAN, DATETIME, DOUBLE, FILE, INTEGER, LIST,
                               REFERENCE, TEXT)
 # Import of the basic  API classes:
 from .common.models import (ACL, ALL, FIX, NONE, OBLIGATORY, RECOMMENDED,
-                            SUGGESTED, Container, DropOffBox, Entity, File,
+                            SUGGESTED, Container, DropOffBox, Entity, File, Parent,
                             Info, Message, Permissions, Property, Query,
                             QueryTemplate, Record, RecordType, delete,
                             execute_query, get_global_acl,
@@ -55,7 +55,7 @@ from .utils.get_entity import (get_entity_by_id, get_entity_by_name,
                                get_entity_by_path)
 
 try:
-    from .version import version as __version__
+    from .version import version as __version__  # pylint: disable=import-error
 except ModuleNotFoundError:
     version = "uninstalled"
     __version__ = version
diff --git a/src/linkahead/apiutils.py b/src/linkahead/apiutils.py
index 4ae8edd16f1fdc00eb7ba2c17661eea6e114885e..b2a612faea1616c64b7e78575156abccfdb29e61 100644
--- a/src/linkahead/apiutils.py
+++ b/src/linkahead/apiutils.py
@@ -28,10 +28,11 @@
 
 """
 from __future__ import annotations
+
 import logging
 import warnings
 from collections.abc import Iterable
-from typing import Any, Union, Optional
+from typing import Any, Optional, Union
 
 from .common.datatype import is_reference
 from .common.models import (SPECIAL_ATTRIBUTES, Container, Entity, File,
@@ -179,183 +180,329 @@ def getCommitIn(folder):
     return get_commit_in(folder)
 
 
-def compare_entities(old_entity: Entity,
-                     new_entity: Entity,
-                     compare_referenced_records: bool = False
+def compare_entities(entity0: Optional[Entity] = None,
+                     entity1: Optional[Entity] = None,
+                     compare_referenced_records: bool = False,
+                     entity_name_id_equivalency: bool = False,
+                     old_entity: Optional[Entity] = None,
+                     new_entity: Optional[Entity] = None,
                      ) -> tuple[dict[str, Any], dict[str, Any]]:
-    """Compare two entites.
-
-    Return a tuple of dictionaries, the first index belongs to additional information for old
-    entity, the second index belongs to additional information for new entity.
-
-    Additional information means in detail:
-    - Additional parents (a list under key "parents")
-    - Information about properties:
-      - Each property lists either an additional property or a property with a changed:
-        - datatype
-        - importance or
-        - value (not implemented yet)
-
-        In case of changed information the value listed under the respective key shows the
-        value that is stored in the respective entity.
-
-    If `compare_referenced_records` is `True`, also referenced entities will be
-    compared using this function (which is then called with
-    `compare_referenced_records = False` to prevent infinite recursion in case
-    of circular references).
-
-    Parameters
-    ----------
-    old_entity, new_entity : Entity
-        Entities to be compared
-    compare_referenced_records : bool, optional
-        Whether to compare referenced records in case of both, `old_entity` and
-        `new_entity`, have the same reference properties and both have a Record
-        object as value. If set to `False`, only the corresponding Python
-        objects are compared which may lead to unexpected behavior when
-        identical records are stored in different objects. Default is False.
+    """Compare two entities.
+
+    Returns two dicts listing the differences between the two entities. The
+    order of the two returned dicts corresponds to the two input entities.
+    The dicts contain two keys, 'parents' and 'properties'. The list saved
+    under the 'parents' key contains those parents of the respective entity
+    that are missing in the other entity, and the 'properties' dict contains
+    properties and SPECIAL_ATTRIBUTES if they are missing or different from
+    their counterparts in the other entity.
+
+    The key used to represent a parent in the parent list or a
+    property in the property dictionary is the entity's name if the
+    name is present for both compared entities, the id otherwise.
+
+    The value of the properties dict for each listed property is again a dict
+    detailing the differences between this property and its counterpart.
+    The characteristics that are checked to determine whether two properties
+    match are the following:
+
+    - datatype
+    - importance
+    - value
+
+    If any of these characteristics differ for a property, the respective
+    string (datatype, importance, value) is added as a key to the dict of the
+    property, with the corresponding characteristic's value as its value,
+    e.g. ``{"prop": {"value": 6, "importance": "SUGGESTED"}}``. Exception: a
+    value of None is not added to the dict.
+    If a property is of type LIST, the comparison is order-sensitive.
+
+    Comparison of multi-properties is not yet supported: if either entity has
+    several instances of one Property, the comparison is aborted and an error
+    is raised.
+
+    Two parents match if their name and id are the same, any further
+    differences are ignored.
+
+    If records referenced in the value field should be checked for equivalency
+    rather than for object equality, set the parameter
+    compare_referenced_records to True.
+
+    Parameters
+    ----------
+    entity0:                    Entity
+                                First entity to be compared.
+    entity1:                    Entity
+                                Second entity to be compared.
+    compare_referenced_records: bool, default: False
+                                If set to True, values with referenced records
+                                are not checked for equality but for
+                                equivalency using this function.
+                                compare_referenced_records is set to False for
+                                these recursive calls, so references of
+                                references need to be equal. If set to `False`,
+                                only the Python objects are compared, which may
+                                lead to unexpected behavior.
+    entity_name_id_equivalency: bool, default: False
+                                If set to True, the comparison between an
+                                entity and an int or str also checks whether
+                                the int/str matches the name or id of the
+                                entity, so Entity(id=100) == 100 == "100".
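+
+    Examples
+    --------
+    A minimal sketch (entities are constructed locally, names are chosen for
+    illustration; only the differing property value is reported)::
+
+        import linkahead as db
+
+        r0 = db.Record(name="TestRecord").add_property(name="a", value=5)
+        r1 = db.Record(name="TestRecord").add_property(name="a", value=6)
+
+        diff0, diff1 = compare_entities(r0, r1)
+        # diff0 == {"properties": {"a": {"value": 5}}, "parents": []}
+        # diff1 == {"properties": {"a": {"value": 6}}, "parents": []}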
 
     """
-    olddiff: dict[str, Any] = {"properties": {}, "parents": []}
-    newdiff: dict[str, Any] = {"properties": {}, "parents": []}
-
-    if old_entity is new_entity:
-        return (olddiff, newdiff)
-
-    if type(old_entity) is not type(new_entity):
+    # ToDo: Discuss intended behaviour
+    # Questions that need clarification:
+    #    - What is intended behaviour for multi-properties and multi-parents?
+    #    - Do different inheritance levels for parents count as a difference?
+    #    - Do we care about parents and properties of properties?
+    #    - Should there be a more detailed comparison of parents without id?
+    #    - Revisit filter - do we care about RecordType when matching?
+    #      How to treat None?
+    #    - Should matching of parents also take the recordtype into account
+    #      for parents that have a name but no id?
+    # Suggestions for enhancements:
+    #    - For the comparison of entities in value and properties, consider
+    #      keeping a list of traversed entities, not only look at first layer
+    #    - Make the empty_diff functionality faster by adding a parameter to
+    #      this function so that it returns after the first found difference?
+    #    - Add parameter to restrict diff to some characteristics
+    #    - Implement comparison of date where one is a string and the other is
+    #      datetime
+    if entity0 is None and old_entity is None:
+        raise ValueError("Please provide the first entity as first argument (`entity0`)")
+    if entity1 is None and new_entity is None:
+        raise ValueError("Please provide the second entity as second argument (`entity1`)")
+    if old_entity is not None:
+        warnings.warn("Please use 'entity0' instead of 'old_entity'.", DeprecationWarning)
+        if entity0 is not None:
+            raise ValueError("You cannot use both entity0 and old_entity")
+        entity0 = old_entity
+    if new_entity is not None:
+        warnings.warn("Please use 'entity1' instead of 'new_entity'.", DeprecationWarning)
+        if entity1 is not None:
+            raise ValueError("You cannot use both entity1 and new_entity")
+        entity1 = new_entity
+    assert entity0 is not None
+    assert entity1 is not None
+
+    diff: tuple[dict[str, Any], dict[str, Any]] = ({"properties": {}, "parents": []},
+                                                   {"properties": {}, "parents": []})
+
+    if entity0 is entity1:
+        return diff
+
+    if type(entity0) is not type(entity1):
         raise ValueError(
             "Comparison of different Entity types is not supported.")
 
+    # compare special attributes
     for attr in SPECIAL_ATTRIBUTES:
-        try:
-            oldattr = old_entity.__getattribute__(attr)
-            old_entity_attr_exists = True
-        except BaseException:
-            old_entity_attr_exists = False
-        try:
-            newattr = new_entity.__getattribute__(attr)
-            new_entity_attr_exists = True
-        except BaseException:
-            new_entity_attr_exists = False
-
-        if old_entity_attr_exists and (oldattr == "" or oldattr is None):
-            old_entity_attr_exists = False
-
-        if new_entity_attr_exists and (newattr == "" or newattr is None):
-            new_entity_attr_exists = False
-
-        if not old_entity_attr_exists and not new_entity_attr_exists:
+        if attr == "value":
             continue
 
-        if ((old_entity_attr_exists ^ new_entity_attr_exists)
-                or (oldattr != newattr)):
-
-            if old_entity_attr_exists:
-                olddiff[attr] = oldattr
+        attr0 = entity0.__getattribute__(attr)
+        # we consider "" and None to be nonexistent
+        attr0_unset = (attr0 == "" or attr0 is None)
 
-            if new_entity_attr_exists:
-                newdiff[attr] = newattr
+        attr1 = entity1.__getattribute__(attr)
+        # we consider "" and None to be nonexistent
+        attr1_unset = (attr1 == "" or attr1 is None)
 
-    # properties
-
-    for prop in old_entity.properties:
-        matching = [p for p in new_entity.properties if p.name == prop.name]
+        # in both entities the current attribute is not set
+        if attr0_unset and attr1_unset:
+            continue
 
+        # treat datatype separately if one datatype is an object and the other
+        # a string or int, and therefore may be a name or id
+        if attr == "datatype":
+            if not attr0_unset and not attr1_unset:
+                if isinstance(attr0, RecordType):
+                    if attr0.name == attr1:
+                        continue
+                    if str(attr0.id) == str(attr1):
+                        continue
+                if isinstance(attr1, RecordType):
+                    if attr1.name == attr0:
+                        continue
+                    if str(attr1.id) == str(attr0):
+                        continue
+
+        # add to diff if attr has different values or is not set for one entity
+        if (attr0_unset != attr1_unset) or (attr0 != attr1):
+            diff[0][attr] = attr0
+            diff[1][attr] = attr1
+
+    # compare value
+    ent0_val, ent1_val = entity0.value, entity1.value
+    if ent0_val != ent1_val:
+        same_value = False
+
+        # Surround scalar values with a list to avoid code duplication -
+        # this way, the scalar values can be checked against special cases
+        # (compare refs, entity id equivalency etc.) in the list loop
+        if not isinstance(ent0_val, list) and not isinstance(ent1_val, list):
+            ent0_val, ent1_val = [ent0_val], [ent1_val]
+
+        if isinstance(ent0_val, list) and isinstance(ent1_val, list):
+            # lists can't be the same if the lengths are different
+            if len(ent0_val) == len(ent1_val):
+                lists_match = True
+                for val0, val1 in zip(ent0_val, ent1_val):
+                    if val0 == val1:
+                        continue
+                    # Compare Entities
+                    if (compare_referenced_records and
+                            isinstance(val0, Entity) and isinstance(val1, Entity)):
+                        try:
+                            same = empty_diff(val0, val1, False,
+                                              entity_name_id_equivalency)
+                        except (ValueError, NotImplementedError):
+                            same = False
+                        if same:
+                            continue
+                    # Compare Entity name and id
+                    if entity_name_id_equivalency:
+                        if (isinstance(val0, Entity)
+                                and isinstance(val1, (int, str))):
+                            if (str(val0.id) == str(val1)
+                                    or str(val0.name) == str(val1)):
+                                continue
+                        if (isinstance(val1, Entity)
+                                and isinstance(val0, (int, str))):
+                            if (str(val1.id) == str(val0)
+                                    or str(val1.name) == str(val0)):
+                                continue
+                    # val0 and val1 could not be matched
+                    lists_match = False
+                    break
+                if lists_match:
+                    same_value = True
+
+        if not same_value:
+            diff[0]["value"] = entity0.value
+            diff[1]["value"] = entity1.value
+
+    # compare properties
+    for prop in entity0.properties:
+        # ToDo: Would making id default break anything?
+        key = prop.name if prop.name is not None else prop.id
+        matching = entity1.properties.filter_by_identity(prop)
         if len(matching) == 0:
-            olddiff["properties"][prop.name] = {}
+            # entity0 has prop, entity1 does not
+            diff[0]["properties"][key] = {}
         elif len(matching) == 1:
-            newdiff["properties"][prop.name] = {}
-            olddiff["properties"][prop.name] = {}
-
-            if (old_entity.get_importance(prop.name) !=
-                    new_entity.get_importance(prop.name)):
-                olddiff["properties"][prop.name]["importance"] = \
-                    old_entity.get_importance(prop.name)
-                newdiff["properties"][prop.name]["importance"] = \
-                    new_entity.get_importance(prop.name)
-
-            if (prop.datatype != matching[0].datatype):
-                olddiff["properties"][prop.name]["datatype"] = prop.datatype
-                newdiff["properties"][prop.name]["datatype"] = \
-                    matching[0].datatype
-
-            if (prop.unit != matching[0].unit):
-                olddiff["properties"][prop.name]["unit"] = prop.unit
-                newdiff["properties"][prop.name]["unit"] = \
-                    matching[0].unit
-
-            if (prop.value != matching[0].value):
-                # basic comparison of value objects says they are different
-                same_value = False
-                if compare_referenced_records:
-                    # scalar reference
-                    if isinstance(prop.value, Entity) and isinstance(matching[0].value, Entity):
-                        # explicitely not recursive to prevent infinite recursion
-                        same_value = empty_diff(
-                            prop.value, matching[0].value, compare_referenced_records=False)
-                    # list of references
-                    elif isinstance(prop.value, list) and isinstance(matching[0].value, list):
-                        # all elements in both lists actually are entity objects
-                        # TODO: check, whether mixed cases can be allowed or should lead to an error
-                        if (all([isinstance(x, Entity) for x in prop.value])
-                                and all([isinstance(x, Entity) for x in matching[0].value])):
-                            # can't be the same if the lengths are different
-                            if len(prop.value) == len(matching[0].value):
-                                # do a one-by-one comparison:
-                                # the values are the same if all diffs are empty
-                                same_value = all(
-                                    [empty_diff(x, y, False) for x, y
-                                     in zip(prop.value, matching[0].value)])
-
-                if not same_value:
-                    olddiff["properties"][prop.name]["value"] = prop.value
-                    newdiff["properties"][prop.name]["value"] = \
-                        matching[0].value
-
-            if (len(newdiff["properties"][prop.name]) == 0
-                    and len(olddiff["properties"][prop.name]) == 0):
-                newdiff["properties"].pop(prop.name)
-                olddiff["properties"].pop(prop.name)
+            # It's possible that prop has name and id, but match only has id
+            key = prop.name if (prop.name is not None and
+                                matching[0].name == prop.name) else prop.id
+            diff[0]["properties"][key] = {}
+            diff[1]["properties"][key] = {}
+            propdiff = (diff[0]["properties"][key],
+                        diff[1]["properties"][key])
+
+            # We should compare the wrapped properties instead of the
+            # wrapping entities if possible:
+            comp1, comp2 = prop, matching[0]
+            if (comp1._wrapped_entity is not None
+                    and comp2._wrapped_entity is not None):
+                comp1, comp2 = comp1._wrapped_entity, comp2._wrapped_entity
+            # Recursive call to determine the differences between properties
+            # Note: Can lead to infinite recursion if two properties have
+            # themselves or each other as subproperties
+            od, nd = compare_entities(comp1, comp2, compare_referenced_records,
+                                      entity_name_id_equivalency)
+            # We do not care about parents and properties here, discard
+            od.pop("parents")
+            od.pop("properties")
+            nd.pop("parents")
+            nd.pop("properties")
+            # use the remaining diff
+            propdiff[0].update(od)
+            propdiff[1].update(nd)
+
+            # As the importance of a property is an attribute of the record
+            # and not the property, it is not contained in the diff returned
+            # by compare_entities and needs to be added separately
+            if (entity0.get_importance(prop) !=
+                    entity1.get_importance(matching[0])):
+                propdiff[0]["importance"] = entity0.get_importance(prop)
+                propdiff[1]["importance"] = entity1.get_importance(matching[0])
+
+            # in case there is no difference, we remove the dict keys again
+            if len(propdiff[0]) == 0 and len(propdiff[1]) == 0:
+                diff[0]["properties"].pop(key)
+                diff[1]["properties"].pop(key)
 
         else:
             raise NotImplementedError(
                 "Comparison not implemented for multi-properties.")
 
-    for prop in new_entity.properties:
-        if len([0 for p in old_entity.properties if p.name == prop.name]) == 0:
-            newdiff["properties"][prop.name] = {}
-
-    # parents
-
-    for parent in old_entity.parents:
-        if len([0 for p in new_entity.parents if p.name == parent.name]) == 0:
-            olddiff["parents"].append(parent.name)
-
-    for parent in new_entity.parents:
-        if len([0 for p in old_entity.parents if p.name == parent.name]) == 0:
-            newdiff["parents"].append(parent.name)
-
-    return (olddiff, newdiff)
-
+    # we have not yet compared properties that do not exist in entity0
+    for prop in entity1.properties:
+        key = prop.name if prop.name is not None else prop.id
+        # check how often the property appears in entity0
+        num_prop_in_ent0 = len(entity0.properties.filter_by_identity(prop))
+        if num_prop_in_ent0 == 0:
+            # property is only present in entity1 - add to diff
+            diff[1]["properties"][key] = {}
+        if num_prop_in_ent0 > 1:
+            # Check whether the property is present multiple times in entity0
+            # and raise error - result would be incorrect
+            raise NotImplementedError(
+                "Comparison not implemented for multi-properties.")
 
-def empty_diff(old_entity: Entity, new_entity: Entity,
-               compare_referenced_records: bool = False) -> bool:
+    # compare parents
+    for index, parents, other_entity in [(0, entity0.parents, entity1),
+                                         (1, entity1.parents, entity0)]:
+        for parent in parents:
+            key = parent.name if parent.name is not None else parent.id
+            matching = other_entity.parents.filter_by_identity(parent)
+            if len(matching) == 0:
+                diff[index]["parents"].append(key)
+                continue
+
+    return diff
+
+
+def empty_diff(entity0: Optional[Entity] = None,
+               entity1: Optional[Entity] = None,
+               compare_referenced_records: bool = False,
+               entity_name_id_equivalency: bool = False,
+               old_entity: Optional[Entity] = None,
+               new_entity: Optional[Entity] = None,
+               ) -> bool:
     """Check whether the `compare_entities` found any differences between
-    old_entity and new_entity.
+    entity0 and entity1.
 
     Parameters
     ----------
-    old_entity, new_entity : Entity
+    entity0, entity1 : Entity
         Entities to be compared
     compare_referenced_records : bool, optional
-        Whether to compare referenced records in case of both, `old_entity` and
-        `new_entity`, have the same reference properties and both have a Record
+        Whether to compare referenced records in case of both, `entity0` and
+        `entity1`, have the same reference properties and both have a Record
         object as value.
-
+    entity_name_id_equivalency : bool, optional
+        If set to True, the comparison between an entity and an int or str also
+        checks whether the int/str matches the name or id of the entity, so
+        Entity(id=100) == 100 == "100".
     """
-    olddiff, newdiff = compare_entities(
-        old_entity, new_entity, compare_referenced_records)
-    for diff in [olddiff, newdiff]:
+    if entity0 is None and old_entity is None:
+        raise ValueError("Please provide the first entity as first argument (`entity0`)")
+    if entity1 is None and new_entity is None:
+        raise ValueError("Please provide the second entity as second argument (`entity1`)")
+    if old_entity is not None:
+        warnings.warn("Please use 'entity0' instead of 'old_entity'.", DeprecationWarning)
+        if entity0 is not None:
+            raise ValueError("You cannot use both entity0 and old_entity")
+        entity0 = old_entity
+    if new_entity is not None:
+        warnings.warn("Please use 'entity1' instead of 'new_entity'.", DeprecationWarning)
+        if entity1 is not None:
+            raise ValueError("You cannot use both entity1 and new_entity")
+        entity1 = new_entity
+    e0diff, e1diff = compare_entities(entity0, entity1, compare_referenced_records,
+                                      entity_name_id_equivalency)
+    for diff in [e0diff, e1diff]:
         for key in ["parents", "properties"]:
             if len(diff[key]) > 0:
                 # There is a difference somewhere in the diff
@@ -376,9 +523,9 @@ def merge_entities(entity_a: Entity,
                    ) -> Entity:
     """Merge entity_b into entity_a such that they have the same parents and properties.
 
-    datatype, unit, value, name and description will only be changed in entity_a
-    if they are None for entity_a and set for entity_b. If there is a
-    corresponding value for entity_a different from None, an
+    The attributes datatype, unit, value, name and description will only be changed
+    in entity_a if they are None for entity_a and set for entity_b. If one of those attributes is
+    set in both entities and they differ, then an
     EntityMergeConflictError will be raised to inform about an unresolvable merge
     conflict.
 
@@ -386,8 +533,6 @@ def merge_entities(entity_a: Entity,
 
     Returns entity_a.
 
-    WARNING: This function is currently experimental and insufficiently tested. Use with care.
-
     Parameters
     ----------
     entity_a, entity_b : Entity
@@ -420,12 +565,11 @@ def merge_entities(entity_a: Entity,
 
     """
 
-    logger.warning(
-        "This function is currently experimental and insufficiently tested. Use with care.")
-
     # Compare both entities:
     diff_r1, diff_r2 = compare_entities(
-        entity_a, entity_b, compare_referenced_records=merge_references_with_empty_diffs)
+        entity_a, entity_b,
+        entity_name_id_equivalency=merge_id_with_resolved_entity,
+        compare_referenced_records=merge_references_with_empty_diffs)
 
     # Go through the comparison and try to apply changes to entity_a:
     for key in diff_r2["parents"]:
@@ -445,7 +589,8 @@ def merge_entities(entity_a: Entity,
             for attribute in ("datatype", "unit", "value"):
                 if (attribute in diff_r2["properties"][key] and
                         diff_r2["properties"][key][attribute] is not None):
-                    if (diff_r1["properties"][key][attribute] is None):
+                    if (attribute not in diff_r1["properties"][key] or
+                            diff_r1["properties"][key][attribute] is None):
                         setattr(entity_a.get_property(key), attribute,
                                 diff_r2["properties"][key][attribute])
                     elif force:
@@ -512,43 +657,103 @@ def merge_entities(entity_a: Entity,
     return entity_a
 
 
-def describe_diff(olddiff, newdiff, name=None, as_update=True):
+def describe_diff(entity0_diff: dict[str, Any], entity1_diff: dict[str, Any],
+                  name: Optional[str] = None,
+                  as_update: Optional[bool] = None,
+                  label_e0: str = "first version",
+                  label_e1: str = "second version",
+                  olddiff: Any = None,
+                  newdiff: Any = None,
+                  ) -> str:
+    """
+    Generate a textual description of the differences between two entities.
+    These can be generated using :func:`compare_entities` and used within this function like this:
+
+    ``describe_diff(*compare_entities(...))``
+
+    Parameters
+    ----------
+
+    entity0_diff: dict[str, Any]
+      First element of the tuple output of :func:`compare_entities`.
+      This is referred to as the "first" version.
+
+    entity1_diff: dict[str, Any]
+      Second element of the tuple output of :func:`compare_entities`.
+      This is referred to as the "second" version.
+
+    name: Optional[str]
+      Default None. Name of the entity that will be shown in the output text.
+
+    as_update: Optional[bool]
+      Default None. Deprecated, has no effect anymore.
+
+    label_e0: str
+      Can be used to set a custom label for the diff that is associated with the first entity.
+
+    label_e1: str
+      Can be used to set a custom label for the diff that is associated with the second entity.
+
+    olddiff: Any
+      Deprecated. Replaced by entity0_diff.
+
+    newdiff: Any
+      Deprecated. Replaced by entity1_diff.
+
+    Returns
+    -------
+    A text description of the differences.
+
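+    Examples
+    --------
+    A minimal sketch (assuming ``Record`` is imported; names and values are illustrative)::
+
+        r0 = Record(name="TestRecord")
+        r1 = Record(name="TestRecord", description="updated")
+        print(describe_diff(*compare_entities(r0, r1)))
+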
+    """
     description = ""
 
-    for attr in list(set(list(olddiff.keys()) + list(newdiff.keys()))):
+    if as_update:
+        warnings.warn("'as_update' is deprecated. Do not use it.", DeprecationWarning)
+    if olddiff:
+        warnings.warn("'olddiff' is deprecated. Use 'entity0_diff' instead.", DeprecationWarning)
+        entity0_diff = olddiff
+    if newdiff:
+        warnings.warn("'newdiff' is deprecated. Use 'entity1_diff' instead.", DeprecationWarning)
+        entity1_diff = newdiff
+
+    for attr in list(set(list(entity0_diff.keys()) + list(entity1_diff.keys()))):
         if attr == "parents" or attr == "properties":
             continue
         description += "{} differs:\n".format(attr)
-        description += "old version: {}\n".format(
-            olddiff[attr] if attr in olddiff else "not set")
-        description += "new version: {}\n\n".format(
-            newdiff[attr] if attr in newdiff else "not set")
+        description += label_e0 + ": {}\n".format(
+            entity0_diff[attr] if attr in entity0_diff else "not set")
+        description += label_e1 + ": {}\n\n".format(
+            entity1_diff[attr] if attr in entity1_diff else "not set")
 
-    if len(olddiff["parents"]) > 0:
-        description += ("Parents that are only in the old version:\n"
-                        + ", ".join(olddiff["parents"]) + "\n")
+    if len(entity0_diff["parents"]) > 0:
+        description += ("Parents that are only in the " + label_e0 + ":\n"
+                        + ", ".join(entity0_diff["parents"]) + "\n")
 
-    if len(newdiff["parents"]) > 0:
-        description += ("Parents that are only in the new version:\n"
-                        + ", ".join(olddiff["parents"]) + "\n")
+    if len(entity1_diff["parents"]) > 0:
+        description += ("Parents that are only in the " + label_e1 + ":\n"
+                        + ", ".join(entity1_diff["parents"]) + "\n")
 
-    for prop in list(set(list(olddiff["properties"].keys())
-                         + list(newdiff["properties"].keys()))):
+    for prop in list(set(list(entity0_diff["properties"].keys())
+                         + list(entity1_diff["properties"].keys()))):
         description += "property {} differs:\n".format(prop)
 
-        if prop not in olddiff["properties"]:
-            description += "it does not exist in the old version: \n"
-        elif prop not in newdiff["properties"]:
-            description += "it does not exist in the new version: \n"
+        if prop not in entity0_diff["properties"]:
+            description += "it does not exist in the " + label_e0 + ":\n"
+        elif prop not in entity1_diff["properties"]:
+            description += "it does not exist in the " + label_e1 + ":\n"
         else:
-            description += "old version: {}\n".format(
-                olddiff["properties"][prop])
-            description += "new version: {}\n\n".format(
-                newdiff["properties"][prop])
+            description += label_e0 + ": {}\n".format(
+                entity0_diff["properties"][prop])
+            description += label_e1 + ": {}\n\n".format(
+                entity1_diff["properties"][prop])
 
     if description != "":
-        description = ("## Difference between the old and the new "
-                       "version of {}\n\n".format(name))+description
+        description = ("## Difference between the " +
+                       label_e0 +
+                       " and the " +
+                       label_e1 +
+                       " of {}\n\n".format(name)) + description
 
     return description
 
diff --git a/src/linkahead/cached.py b/src/linkahead/cached.py
index cf1d1d34362335f87c5eca094b5aa9d6b750f68d..11cb959ba10fd507c39eb4d1ddd00bf478859852 100644
--- a/src/linkahead/cached.py
+++ b/src/linkahead/cached.py
@@ -107,7 +107,7 @@ If a query phrase is given, the result must be unique.  If this is not what you
 def cached_query(query_string: str) -> Container:
     """A cached version of :func:`linkahead.execute_query<linkahead.common.models.execute_query>`.
 
-All additional arguments are at their default values.
+    All additional arguments are at their default values.
 
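+    A typical use, with an illustrative query string::
+
+        from linkahead.cached import cached_query
+        records = cached_query("FIND RECORD Experiment")
+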
     """
     result = _cached_access(AccessType.QUERY, query_string, unique=False)
@@ -116,7 +116,7 @@ All additional arguments are at their default values.
     return result
 
 
-@ lru_cache(maxsize=DEFAULT_SIZE)
+@lru_cache(maxsize=DEFAULT_SIZE)
 def _cached_access(kind: AccessType, value: Union[str, int], unique: bool = True):
     # This is the function that is actually cached.
     # Due to the arguments, the cache has kind of separate sections for cached_query and
@@ -161,11 +161,12 @@ def cache_clear() -> None:
 def cache_info():
     """Return info about the cache that is used by `cached_query` and `cached_get_entity_by`.
 
-Returns
--------
+    Returns
+    -------
 
-out: named tuple
-  See the standard library :func:`functools.lru_cache` for details."""
+    out: named tuple
+      See the standard library :func:`functools.lru_cache` for details.
+    """
     return _cached_access.cache_info()
 
 
@@ -188,21 +189,21 @@ def cache_fill(items: dict[Union[str, int], Any],
     This allows to fill the cache without actually submitting queries.  Note that this does not
     overwrite existing entries with the same keys.
 
-Parameters
-----------
+    Parameters
+    ----------
 
-items: dict
-  A dictionary with the entries to go into the cache.  The keys must be compatible with the
-  AccessType given in ``kind``
+    items: dict
+      A dictionary with the entries to go into the cache.  The keys must be compatible with the
+      AccessType given in ``kind``
 
-kind: AccessType, optional
-  The AccessType, for example ID, name, path or query.
+    kind: AccessType, optional
+      The AccessType, for example ID, name, path or query.
 
-unique: bool, optional
-  If True, fills the cache for :func:`cached_get_entity_by`, presumably with
-  :class:`linkahead.Entity<linkahead.common.models.Entity>` objects.  If False, the cache should be
-  filled with :class:`linkahead.Container<linkahead.common.models.Container>` objects, for use with
-  :func:`cached_query`.
+    unique: bool, optional
+      If True, fills the cache for :func:`cached_get_entity_by`, presumably with
+      :class:`linkahead.Entity<linkahead.common.models.Entity>` objects.  If False, the cache should be
+      filled with :class:`linkahead.Container<linkahead.common.models.Container>` objects, for use with
+      :func:`cached_query`.
 
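+    A hypothetical example that pre-fills the query cache; ``some_container`` is illustrative::
+
+        from linkahead.cached import AccessType, cache_fill
+        cache_fill({"FIND RECORD Experiment": some_container},
+                   kind=AccessType.QUERY, unique=False)
+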
     """
 
diff --git a/src/linkahead/common/administration.py b/src/linkahead/common/administration.py
index dee341fa84dd85cbd41a77c0e2d510a96f2c4824..9d9d4f013f1ad10cd0957cfb9a9e4f2f44bd6102 100644
--- a/src/linkahead/common/administration.py
+++ b/src/linkahead/common/administration.py
@@ -91,7 +91,7 @@ def get_server_properties() -> dict[str, Optional[str]]:
     props: dict[str, Optional[str]] = dict()
 
     for elem in xml.getroot():
-        props[elem.tag] = elem.text
+        props[str(elem.tag)] = str(elem.text)
 
     return props
 
@@ -156,7 +156,10 @@ def generate_password(length: int):
 def _retrieve_user(name: str, realm: Optional[str] = None, **kwargs):
     con = get_connection()
     try:
-        return con._http_request(method="GET", path="User/" + (realm + "/" + name if realm is not None else name), **kwargs).read()
+        return con._http_request(
+            method="GET",
+            path="User/" + (realm + "/" + name if realm is not None else name),
+            **kwargs).read()
     except HTTPForbiddenError as e:
         e.msg = "You are not permitted to retrieve this user."
         raise
@@ -198,7 +201,9 @@ def _update_user(name: str,
     if entity is not None:
         params["entity"] = str(entity)
     try:
-        return con.put_form_data(entity_uri_segment="User/" + (realm + "/" + name if realm is not None else name), params=params, **kwargs).read()
+        return con.put_form_data(entity_uri_segment="User/" + (realm + "/" +
+                                                               name if realm is not None else name),
+                                 params=params, **kwargs).read()
     except HTTPResourceNotFoundError as e:
         e.msg = "User does not exist."
         raise e
@@ -246,7 +251,9 @@ def _insert_user(name: str,
 def _insert_role(name, description, **kwargs):
     con = get_connection()
     try:
-        return con.post_form_data(entity_uri_segment="Role", params={"role_name": name, "role_description": description}, **kwargs).read()
+        return con.post_form_data(entity_uri_segment="Role",
+                                  params={"role_name": name, "role_description": description},
+                                  **kwargs).read()
     except HTTPForbiddenError as e:
         e.msg = "You are not permitted to insert a new role."
         raise
@@ -259,7 +266,9 @@ def _insert_role(name, description, **kwargs):
 def _update_role(name, description, **kwargs):
     con = get_connection()
     try:
-        return con.put_form_data(entity_uri_segment="Role/" + name, params={"role_description": description}, **kwargs).read()
+        return con.put_form_data(entity_uri_segment="Role/" + name,
+                                 params={"role_description": description},
+                                 **kwargs).read()
     except HTTPForbiddenError as e:
         e.msg = "You are not permitted to update this role."
         raise
@@ -301,8 +310,10 @@ def _set_roles(username, roles, realm=None, **kwargs):
     body = xml2str(xml)
     con = get_connection()
     try:
-        body = con._http_request(method="PUT", path="UserRoles/" + (realm + "/" +
-                                                                    username if realm is not None else username), body=body, **kwargs).read()
+        body = con._http_request(method="PUT",
+                                 path="UserRoles/" + (realm + "/" +
+                                                      username if realm is not None else username),
+                                 body=body, **kwargs).read()
     except HTTPForbiddenError as e:
         e.msg = "You are not permitted to set this user's roles."
         raise
@@ -345,20 +356,20 @@ def _get_roles(username, realm=None, **kwargs):
 def _set_permissions(role, permission_rules, **kwargs):
     """Set permissions for a role.
 
-Parameters
-----------
+    Parameters
+    ----------
 
-role : str
-    The role for which the permissions are set.
+    role : str
+        The role for which the permissions are set.
 
-permission_rules : iterable<PermissionRule>
-    An iterable with PermissionRule objects.
+    permission_rules : iterable<PermissionRule>
+        An iterable with PermissionRule objects.
 
-**kwargs :
-    Additional arguments which are passed to the HTTP request.
+    **kwargs :
+        Additional arguments which are passed to the HTTP request.
 
-Returns
--------
+    Returns
+    -------
     None
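+
+    For example (role name and permission string are illustrative)::
+
+        _set_permissions("admin", [PermissionRule("grant", "RETRIEVE:*")])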
     """
     xml = etree.Element("PermissionRules")
@@ -369,7 +380,8 @@ Returns
     body = xml2str(xml)
     con = get_connection()
     try:
-        return con._http_request(method="PUT", path="PermissionRules/" + role, body=body, **kwargs).read()
+        return con._http_request(method="PUT", path="PermissionRules/" + role, body=body,
+                                 **kwargs).read()
     except HTTPForbiddenError as e:
         e.msg = "You are not permitted to set this role's permissions."
         raise
@@ -381,7 +393,9 @@ Returns
 def _get_permissions(role, **kwargs):
     con = get_connection()
     try:
-        return PermissionRule._parse_body(con._http_request(method="GET", path="PermissionRules/" + role, **kwargs).read())
+        return PermissionRule._parse_body(con._http_request(method="GET",
+                                                            path="PermissionRules/" + role,
+                                                            **kwargs).read())
     except HTTPForbiddenError as e:
         e.msg = "You are not permitted to retrieve this role's permissions."
         raise
@@ -393,15 +407,15 @@ def _get_permissions(role, **kwargs):
 class PermissionRule():
     """Permission rules.
 
-Parameters
-----------
-action : str
-    Either "grant" or "deny"
+    Parameters
+    ----------
+    action : str
+        Either "grant" or "deny"
 
-permission : str
-    For example ``RETRIEVE:*``.
+    permission : str
+        For example ``RETRIEVE:*``.
 
-priority : bool, optional
+    priority : bool, optional
     Whether the priority shall be set, defaults is False.
     """
 
@@ -429,7 +443,8 @@ priority : bool, optional
         if permission is None:
             raise ValueError(f"Permission is missing in PermissionRule xml: {elem}")
         priority = PermissionRule._parse_boolean(elem.get("priority"))
-        return PermissionRule(elem.tag, permission, priority if priority is not None else False)
+        return PermissionRule(str(elem.tag), permission,
+                              priority if priority is not None else False)
 
     @staticmethod
     def _parse_body(body: str):
diff --git a/src/linkahead/common/models.py b/src/linkahead/common/models.py
index 243323572122830c61a789974d79fb176b2d86d9..b3acd770d0f0960c599144032f028a68b4bd3ad5 100644
--- a/src/linkahead/common/models.py
+++ b/src/linkahead/common/models.py
@@ -37,26 +37,25 @@ from __future__ import annotations  # Can be removed with 3.10.
 
 import re
 import sys
+import warnings
 from builtins import str
 from copy import deepcopy
+from datetime import date, datetime
 from functools import cmp_to_key
 from hashlib import sha512
 from os import listdir
 from os.path import isdir
 from random import randint
 from tempfile import NamedTemporaryFile
-
-from typing import TYPE_CHECKING
-from typing import Any, Final, Literal, Optional, TextIO, Union
+from typing import TYPE_CHECKING, Any, Final, Literal, Optional, TextIO, Union
 
 if TYPE_CHECKING:
-    from datetime import datetime
-    from .datatype import DATATYPE
-    from tempfile import _TemporaryFileWrapper
     from io import BufferedWriter
     from os import PathLike
-    QueryDict = dict[str, Optional[str]]
+    from tempfile import _TemporaryFileWrapper
 
+    from .datatype import DATATYPE
+    QueryDict = dict[str, Optional[str]]
 
 from warnings import warn
 
@@ -65,36 +64,17 @@ from lxml import etree
 from ..configuration import get_config
 from ..connection.connection import get_connection
 from ..connection.encode import MultipartParam, multipart_encode
-from ..exceptions import (
-    AmbiguousEntityError,
-    AuthorizationError,
-    ConsistencyError,
-    EmptyUniqueQueryError,
-    EntityDoesNotExistError,
-    EntityError,
-    EntityHasNoAclError,
-    EntityHasNoDatatypeError,
-    HTTPURITooLongError,
-    LinkAheadConnectionError,
-    LinkAheadException,
-    MismatchingEntitiesError,
-    PagingConsistencyError,
-    QueryNotUniqueError,
-    TransactionError,
-    UniqueNamesError,
-    UnqualifiedParentsError,
-    UnqualifiedPropertiesError,
-)
-from .datatype import (
-    BOOLEAN,
-    DATETIME,
-    DOUBLE,
-    INTEGER,
-    TEXT,
-    get_list_datatype,
-    is_list_datatype,
-    is_reference,
-)
+from ..exceptions import (AmbiguousEntityError, AuthorizationError,
+                          ConsistencyError, EmptyUniqueQueryError,
+                          EntityDoesNotExistError, EntityError,
+                          EntityHasNoAclError, EntityHasNoDatatypeError,
+                          HTTPURITooLongError, LinkAheadConnectionError,
+                          LinkAheadException, MismatchingEntitiesError,
+                          PagingConsistencyError, QueryNotUniqueError,
+                          TransactionError, UniqueNamesError,
+                          UnqualifiedParentsError, UnqualifiedPropertiesError)
+from .datatype import (BOOLEAN, DATETIME, DOUBLE, INTEGER, TEXT,
+                       get_list_datatype, is_list_datatype, is_reference)
 from .state import State
 from .timezone import TimeZone
 from .utils import uuid, xml2str
@@ -114,8 +94,8 @@ if TYPE_CHECKING:
     IMPORTANCE = Literal["OBLIGATORY", "RECOMMENDED", "SUGGESTED", "FIX", "NONE"]
     ROLE = Literal["Entity", "Record", "RecordType", "Property", "File"]
 
-SPECIAL_ATTRIBUTES = ["name", "role", "datatype", "description",
-                      "id", "path", "checksum", "size", "value"]
+SPECIAL_ATTRIBUTES = ["name", "role", "datatype", "description", "file",
+                      "id", "path", "checksum", "size", "value", "unit"]
 
 
 class Entity:
@@ -138,10 +118,10 @@ class Entity:
         description: Optional[str] = None,  # @ReservedAssignment
         datatype: Optional[DATATYPE] = None,
         value=None,
-        **kwargs,
+        role=None,
     ):
 
-        self.__role: Optional[ROLE] = kwargs["role"] if "role" in kwargs else None
+        self.__role: Optional[ROLE] = role
         self._checksum: Optional[str] = None
         self._size = None
         self._upload = None
@@ -156,8 +136,8 @@ class Entity:
         self.datatype: Optional[DATATYPE] = datatype
         self.value = value
         self.messages = Messages()
-        self.properties = _Properties()
-        self.parents = _ParentList()
+        self.properties = PropertyList()
+        self.parents = ParentList()
         self.path: Optional[str] = None
         self.file: Optional[File] = None
         self.unit: Optional[str] = None
@@ -258,7 +238,9 @@ class Entity:
     @id.setter
     def id(self, new_id) -> None:
         if new_id is not None:
-            self.__id: Optional[int] = int(new_id)
+            if not isinstance(new_id, int):
+                new_id = int(new_id)
+            self.__id: Optional[int] = new_id
         else:
             self.__id = None
 
@@ -367,6 +349,15 @@ class Entity:
     def pickup(self, new_pickup):
         self.__pickup = new_pickup
 
+    @property   # getter for _cuid
+    def cuid(self):
+        # Set if None?
+        return self._cuid
+
+    @property   # getter for _flags
+    def flags(self):
+        return self._flags.copy()   # for dict[str, str] shallow copy is enough
+
     def grant(
         self,
         realm: Optional[str] = None,
@@ -514,6 +505,10 @@ class Entity:
 
         return self
 
+    def get_versionid(self):
+        """Returns the concatenation of ID and version"""
+        return str(self.id) + "@" + str(self.version.id)
+
     def get_importance(self, property):  # @ReservedAssignment
         """Get the importance of a given property regarding this entity."""
 
@@ -871,29 +866,29 @@ class Entity:
         check. Note that, if checked, name or ID should not be None,
         lest the check fail.
 
-Parameters
-----------
+        Parameters
+        ----------
 
-parent: Entity
-  Check for this parent.
+        parent: Entity
+          Check for this parent.
 
-recursive: bool, optional
-  Whether to check recursively.
+        recursive: bool, optional
+          Whether to check recursively.
 
-check_name: bool, optional
-  Whether to use the name for ancestry check.
+        check_name: bool, optional
+          Whether to use the name for ancestry check.
 
-check_id: bool, optional
-  Whether to use the ID for ancestry check.
+        check_id: bool, optional
+          Whether to use the ID for ancestry check.
 
-retrieve: bool, optional
-  If False, do not retrieve parents from the server.
+        retrieve: bool, optional
+          If False, do not retrieve parents from the server.
 
-Returns
--------
-out: bool
-  True if ``parent`` is a true parent, False otherwise.
-"""
+        Returns
+        -------
+        out: bool
+          True if ``parent`` is a true parent, False otherwise.
+        """
 
         if recursive:
             parents = self.get_parents_recursively(retrieve=retrieve)
@@ -920,7 +915,7 @@ out: bool
     def get_parents(self):
         """Get all parents of this entity.
 
-        @return: _ParentList(list)
+        @return: ParentList(list)
         """
 
         return self.parents
@@ -928,17 +923,17 @@ out: bool
     def get_parents_recursively(self, retrieve: bool = True) -> list[Entity]:
         """Get all ancestors of this entity.
 
-Parameters
-----------
+        Parameters
+        ----------
 
-retrieve: bool, optional
-  If False, do not retrieve parents from the server.
+        retrieve: bool, optional
+          If False, do not retrieve parents from the server.
 
-Returns
--------
-out: list[Entity]
-  The parents of this Entity
-"""
+        Returns
+        -------
+        out: list[Entity]
+          The parents of this Entity
+        """
 
         all_parents: list[Entity] = []
         self._get_parent_recursively(all_parents, retrieve=retrieve)
@@ -1020,7 +1015,7 @@ out: list[Entity]
     def get_properties(self):
         """Get all properties of this entity.
 
-        @return: _Properties(list)
+        @return: PropertyList(list)
         """
 
         return self.properties
@@ -1137,7 +1132,7 @@ out: list[Entity]
         else:
             return getattr(ref, special_selector.lower())
 
-    def get_property_values(self, *selectors):
+    def get_property_values(self, *selectors) -> tuple:
         """ Return a tuple with the values described by the given selectors.
 
         This represents an entity's properties as if it was a row of a table
@@ -1250,6 +1245,7 @@ out: list[Entity]
         xml: Optional[etree._Element] = None,
         add_properties: INHERITANCE = "ALL",
         local_serialization: bool = False,
+        visited_entities: Optional[list] = None
     ) -> etree._Element:
         """Generate an xml representation of this entity. If the parameter xml
         is given, all attributes, parents, properties, and messages of this
@@ -1257,14 +1253,25 @@ out: list[Entity]
 
         Raise an error if xml is not a lxml.etree.Element
 
-        @param xml: an xml element to which all attributes, parents,
-            properties, and messages
-            are to be added.
-
-        FIXME: Add documentation for the add_properties parameter.
-        FIXME: Add docuemntation for the local_serialization parameter.
+        Parameters
+        ----------
+        xml : etree._Element, optional
+            an xml element to which all attributes, parents,
+            properties, and messages are to be added. Default is None.
+        visited_entities : list, optional
+            List of entities that have already been serialized, used for the
+            recursion check. Should never be set manually. Default is None.
+        add_properties : INHERITANCE, optional
+            FIXME: Add documentation for the add_properties
+            parameter. Default is "ALL".
+        local_serialization : bool, optional
+            FIXME: Add documentation for the local_serialization
+            parameter. Default is False.
 
-        @return: xml representation of this entity.
+        Returns
+        -------
+        xml : etree._Element
+            xml representation of this entity.
         """
 
         if xml is None:
@@ -1273,9 +1280,17 @@ out: list[Entity]
             xml = etree.Element(elem_tag)
         assert isinstance(xml, etree._Element)
 
+        if visited_entities is None:
+            visited_entities = []
+        if self in visited_entities:
+            xml.text = xml2str(etree.Comment("Recursive reference"))
+            return xml
+        visited_entities.append(self)
+
         # unwrap wrapped entity
         if self._wrapped_entity is not None:
-            xml = self._wrapped_entity.to_xml(xml, add_properties)
+            xml = self._wrapped_entity.to_xml(xml, add_properties,
+                                              visited_entities=visited_entities.copy())
 
         if self.id is not None:
             xml.set("id", str(self.id))
@@ -1290,6 +1305,10 @@ out: list[Entity]
             xml.set("description", str(self.description))
 
         if self.version is not None:
+            # If this ever causes problems, we might add
+            # visited_entities support here since it does have some
+            # recursion with predecessors / successors. But should be
+            # fine for now, since it is always set by the server.
             xml.append(self.version.to_xml())
 
         if self.value is not None:
@@ -1299,7 +1318,8 @@ out: list[Entity]
                 elif self.value.name is not None:
                     xml.text = str(self.value.name)
                 else:
-                    xml.text = str(self.value)
+                    dt_str = xml2str(self.value.to_xml(visited_entities=visited_entities.copy()))
+                    xml.text = dt_str
             elif isinstance(self.value, list):
                 for v in self.value:
                     v_elem = etree.Element("Value")
@@ -1310,7 +1330,8 @@ out: list[Entity]
                         elif v.name is not None:
                             v_elem.text = str(v.name)
                         else:
-                            v_elem.text = str(v)
+                            dt_str = xml2str(v.to_xml(visited_entities=visited_entities.copy()))
+                            v_elem.text = dt_str
                     elif v == "":
                         v_elem.append(etree.Element("EmptyString"))
                     elif v is None:
@@ -1332,7 +1353,11 @@ out: list[Entity]
                 elif self.datatype.name is not None:
                     xml.set("datatype", str(self.datatype.name))
                 else:
-                    xml.set("datatype", str(self.datatype))
+                    dt_str = xml2str(self.datatype.to_xml(visited_entities=visited_entities.copy()))
+                    # Todo: Use for pretty-printing with calls from _repr_ only?
+                    # dt_str = dt_str.replace('<', 'ᐸ').replace('>', 'ᐳ').replace(' ', '⠀').replace(
+                    # '"', '\'').replace('\n', '')
+                    xml.set("datatype", dt_str)
             else:
                 xml.set("datatype", str(self.datatype))
 
@@ -1355,10 +1380,11 @@ out: list[Entity]
             self.messages.to_xml(xml)
 
         if self.parents is not None:
-            self.parents.to_xml(xml)
+            self.parents.to_xml(xml, visited_entities=visited_entities.copy())
 
         if self.properties is not None:
-            self.properties.to_xml(xml, add_properties)
+            self.properties.to_xml(xml, add_properties,
+                                   visited_entities=visited_entities.copy())
 
         if len(self._flags) > 0:
             flagattr = ""
@@ -1596,15 +1622,15 @@ out: list[Entity]
                unique=True, flags=None, sync=True):
         """Update this entity.
 
-There are two possible work-flows to perform this update:
-First:
-    1) retrieve an entity
-    2) do changes
-    3) call update method
+        There are two possible work-flows to perform this update:
+        First:
+            1) retrieve an entity
+            2) do changes
+            3) call update method
 
-Second:
-    1) construct entity with id
-    2) call update method.
+        Second:
+            1) construct entity with id
+            2) call update method.
 
         For slight changes the second one it is more comfortable. Furthermore, it is possible to
         stay off-line until calling the update method. The name, description, unit, datatype, path,
@@ -1685,6 +1711,9 @@ def _parse_value(datatype, value):
         if isinstance(value, str):
             return value
 
+    if datatype == DATETIME and (isinstance(value, date) or isinstance(value, datetime)):
+        return value
+
     # deal with collections
     if isinstance(datatype, str):
         matcher = re.compile(r"^(?P<col>[^<]+)<(?P<dt>[^>]+)>$")
@@ -1876,12 +1905,12 @@ class QueryTemplate():
 
     @staticmethod
     def _from_xml(xml: etree._Element):
-        if xml.tag.lower() == "querytemplate":
+        if str(xml.tag).lower() == "querytemplate":
             q = QueryTemplate(name=xml.get("name"),
                               description=xml.get("description"), query=None)
 
             for e in xml:
-                if e.tag.lower() == "query":
+                if str(e.tag).lower() == "query":
                     q.query = e.text
                 else:
                     child = _parse_single_xml_element(e)
@@ -1918,7 +1947,7 @@ class QueryTemplate():
         ret = Messages()
 
         for m in self.messages:
-            if m.type.lower() == "error":
+            if str(m.type).lower() == "error":
                 ret.append(m)
 
         return ret
@@ -1964,11 +1993,16 @@ class Parent(Entity):
         xml: Optional[etree._Element] = None,
         add_properties: INHERITANCE = "NONE",
         local_serialization: bool = False,
+        visited_entities: Optional[list] = None,
     ):
         if xml is None:
             xml = etree.Element("Parent")
 
-        return super().to_xml(xml=xml, add_properties=add_properties)
+        if visited_entities is None:
+            visited_entities = []
+
+        return super().to_xml(xml=xml, add_properties=add_properties,
+                              visited_entities=visited_entities)
 
 
 class _EntityWrapper(object):
@@ -1997,8 +2031,6 @@ class Property(Entity):
 
         Parameters
         ----------
-       Parameters
-        ----------
         parent : Entity or int or str or None
             The parent entity, either specified by the Entity object
             itself, or its id or its name. Default is None.
@@ -2041,14 +2073,19 @@ class Property(Entity):
         xml: Optional[etree._Element] = None,
         add_properties: INHERITANCE = "ALL",
         local_serialization: bool = False,
+        visited_entities: Optional[list] = None,
     ):
         if xml is None:
             xml = etree.Element("Property")
 
+        if visited_entities is None:
+            visited_entities = []
+
         return super(Property, self).to_xml(
             xml=xml,
             add_properties=add_properties,
             local_serialization=local_serialization,
+            visited_entities=visited_entities,
         )
 
     def is_reference(self, server_retrieval: bool = False) -> Optional[bool]:
@@ -2206,15 +2243,20 @@ class RecordType(Entity):
         xml: Optional[etree._Element] = None,
         add_properties: INHERITANCE = "ALL",
         local_serialization: bool = False,
+        visited_entities: Optional[list] = None,
     ) -> etree._Element:
         if xml is None:
             xml = etree.Element("RecordType")
 
+        if visited_entities is None:
+            visited_entities = []
+
         return Entity.to_xml(
             self,
             xml=xml,
             add_properties=add_properties,
             local_serialization=local_serialization,
+            visited_entities=visited_entities,
         )
 
 
@@ -2245,14 +2287,19 @@ class Record(Entity):
         xml: Optional[etree._Element] = None,
         add_properties: INHERITANCE = "ALL",
         local_serialization: bool = False,
+        visited_entities: Optional[list] = None,
     ):
         if xml is None:
             xml = etree.Element("Record")
 
+        if visited_entities is None:
+            visited_entities = []
+
         return super().to_xml(
             xml=xml,
             add_properties=add_properties,
             local_serialization=local_serialization,
+            visited_entities=visited_entities
         )
 
 
@@ -2322,6 +2369,7 @@ class File(Record):
         xml: Optional[etree._Element] = None,
         add_properties: INHERITANCE = "ALL",
         local_serialization: bool = False,
+        visited_entities: Optional[list] = None,
     ) -> etree._Element:
         """Convert this file to an xml element.
 
@@ -2331,8 +2379,12 @@ class File(Record):
         if xml is None:
             xml = etree.Element("File")
 
+        if visited_entities is None:
+            visited_entities = []
+
         return Entity.to_xml(self, xml=xml, add_properties=add_properties,
-                             local_serialization=local_serialization)
+                             local_serialization=local_serialization,
+                             visited_entities=visited_entities)
 
     def download(self, target: Optional[str] = None) -> str:
         """Download this file-entity's actual file from the file server. It
@@ -2420,11 +2472,14 @@ class File(Record):
             value=value, unit=unit, importance=importance, inheritance=inheritance)
 
 
-class _Properties(list):
-    """FIXME: Add docstring."""
+class PropertyList(list):
+    """A list class for Property objects
 
-    def __init__(self):
-        list.__init__(self)
+    This class provides additional functionality like get/set_importance or get_by_name.
+    """
+
+    def __init__(self) -> None:
+        super().__init__()
         self._importance: dict[Entity, IMPORTANCE] = dict()
         self._inheritance: dict[Entity, INHERITANCE] = dict()
         self._element_by_name: dict[str, Entity] = dict()
@@ -2491,15 +2546,20 @@ class _Properties(list):
 
         return self
 
-    def to_xml(self, add_to_element: etree._Element, add_properties: INHERITANCE):
+    def to_xml(self, add_to_element: etree._Element, add_properties: INHERITANCE,
+               visited_entities: Optional[list] = None):
+
+        if visited_entities is None:
+            visited_entities = []
+
         p: Property
         for p in self:
             importance = self._importance.get(p)
 
             if add_properties == FIX and not importance == FIX:
                 continue
-
-            pelem = p.to_xml(xml=etree.Element("Property"), add_properties=FIX)
+            pelem = p.to_xml(xml=etree.Element("Property"), add_properties=FIX,
+                             visited_entities=visited_entities.copy())
 
             if p in self._importance:
                 pelem.set("importance", str(importance))
@@ -2517,6 +2577,42 @@ class _Properties(list):
 
         return xml2str(xml)
 
+    def filter(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning("This function was renamed to filter_by_identity."))
+        return self.filter_by_identity(*args, **kwargs)
+
+    def filter_by_identity(self, prop: Optional[Property] = None,
+                           pid: Union[None, str, int] = None,
+                           name: Optional[str] = None,
+                           conjunction: bool = False) -> list:
+        """
+        Return all Properties from this PropertyList that match the
+        selection criteria.
+
+        Please refer to the documentation of _filter_entity_list_by_identity for a detailed
+        description of behaviour.
+
+        Parameters
+        ----------
+        prop              : Property
+                            Property to match name and ID with. Cannot be set
+                            simultaneously with ID or name.
+        pid               : str, int
+                            Property ID to match
+        name              : str
+                            Property name to match
+        conjunction       : bool, defaults to False
+                            Set to return only entities that match both id and name
+                            if both are given.
+
+        Returns
+        -------
+        matches          : list
+                           List containing all matching Properties
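+
+        Example (property name and value are illustrative)::
+
+            rec = Record(name="TestRecord")
+            rec.add_property(name="length", value=5)
+            matches = rec.properties.filter_by_identity(name="length")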
+        """
+        return _filter_entity_list_by_identity(self, pid=pid, name=name, entity=prop,
+                                               conjunction=conjunction)
+
     def _get_entity_by_cuid(self, cuid: str):
         '''
         Get the first entity which has the given cuid.
@@ -2574,9 +2670,7 @@ class _Properties(list):
         raise KeyError(str(prop) + " not found.")
 
 
-class _ParentList(list):
-    # TODO unclear why this class is private. Isn't it use full for users?
-
+class ParentList(list):
     def _get_entity_by_cuid(self, cuid):
         '''
         Get the first entity which has the given cuid.
@@ -2591,8 +2685,8 @@ class _ParentList(list):
                     return e
         raise KeyError("No entity with that cuid in this container.")
 
-    def __init__(self):
-        list.__init__(self)
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
         self._element_by_name = dict()
         self._element_by_id = dict()
 
@@ -2605,22 +2699,21 @@ class _ParentList(list):
         if isinstance(parent, list):
             for p in parent:
                 self.append(p)
-
             return
 
         if isinstance(parent, Entity):
-            if parent.id:
-                self._element_by_id[str(parent.id)] = parent
-
-            if parent.name:
-                self._element_by_name[parent.name] = parent
             list.append(self, parent)
         else:
             raise TypeError("Argument was not an Entity")
 
         return self
 
-    def to_xml(self, add_to_element: etree._Element):
+    def to_xml(self, add_to_element: etree._Element,
+               visited_entities: Optional[list] = None):
+
+        if visited_entities is None:
+            visited_entities = []
+
         for p in self:
             pelem = etree.Element("Parent")
 
@@ -2655,7 +2748,59 @@ class _ParentList(list):
 
         return xml2str(xml)
 
+    def filter(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning("This function was renamed to filter_by_identity."))
+        return self.filter_by_identity(*args, **kwargs)
+
+    def filter_by_identity(self, parent: Optional[Parent] = None,
+                           pid: Union[None, str, int] = None,
+                           name: Optional[str] = None,
+                           conjunction: bool = False) -> list:
+        """
+        Return all Parents from this ParentList that match the selection
+        criteria.
+
+        Please refer to the documentation of _filter_entity_list_by_identity for a detailed
+        description of behaviour.
+
+        Parameters
+        ----------
+        parent            : Parent
+                            Parent to match name and ID with. Cannot be set
+                            simultaneously with pid or name.
+        pid               : str, int
+                            Parent ID to match
+        name              : str
+                            Parent name to match
+        conjunction       : bool, defaults to False
+                            Set to return only entities that match both id and name
+                            if both are given.
+
+        Returns
+        -------
+        matches          : list
+                           List containing all matching Parents
+        """
+        return _filter_entity_list_by_identity(self, pid=pid, name=name, entity=parent,
+                                               conjunction=conjunction)
+
     def remove(self, parent: Union[Entity, int, str]):
+        """
+        Remove first occurrence of parent.
+
+        Parameters
+        ----------
+        parent: Union[Entity, int, str]
+            The parent to be removed, identified via ID or name. If a Parent object
+            is provided, first the ID and then the name is used to identify the
+            parent to be removed.
+
+        Returns
+        -------
+        None
+        """
+
         if isinstance(parent, Entity):
             if parent in self:
                 list.remove(self, parent)
@@ -2673,11 +2818,11 @@ class _ParentList(list):
                     # by name
 
                     for e in self:
-                        if e.name is not None and e.name == parent.name:
+                        if e.name is not None and str(e.name).lower() == str(parent.name).lower():
                             list.remove(self, e)
 
                             return
-        elif hasattr(parent, "encode"):
+        elif isinstance(parent, str):
             # by name
 
             for e in self:
@@ -2696,6 +2841,19 @@ class _ParentList(list):
         raise KeyError(str(parent) + " not found.")
 
 
+class _Properties(PropertyList):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning("This class is deprecated. Please use PropertyList."))
+        super().__init__(*args, **kwargs)
+
+
+class _ParentList(ParentList):
+    def __init__(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning("This class is deprecated. Please use ParentList "
+                                         "(without underscore)."))
+        super().__init__(*args, **kwargs)
+
+
 class Messages(list):
     """This specialization of list stores error, warning, info, and other
     messages. The mentioned three messages types play a special role.
@@ -2956,12 +3114,12 @@ def _basic_sync(e_local, e_remote):
     if e_local.role is None:
         e_local.role = e_remote.role
     elif e_remote.role is not None and not e_local.role.lower() == e_remote.role.lower():
-        raise ValueError("The resulting entity had a different role ({0}) "
-                         "than the local one ({1}). This probably means, that "
+        raise ValueError(f"The resulting entity had a different role ({e_remote.role}) "
+                         f"than the local one ({e_local.role}). This probably means, that "
                          "the entity was intialized with a wrong class "
                          "by this client or it has changed in the past and "
-                         "this client did't know about it yet.".format(
-                             e_remote.role, e_local.role))
+                         "this client didn't know about it yet.\nThis is the local version of the"
+                         f" Entity:\n{e_local}\nThis is the remote one:\n{e_remote}")
 
     e_local.id = e_remote.id
     e_local.name = e_remote.name
@@ -3191,9 +3349,15 @@ class Container(list):
         """Get an xml tree representing this Container or append all entities
         to the given xml element.
 
-        @param add_to_element=None: optional element to which all entities of this container is to
-               be appended.
-        @return xml element
+        Parameters
+        ----------
+        add_to_element : etree._Element, optional
+            optional element to which all entities of this container is to
+            be appended. Default is None
+
+        Returns
+        -------
+        xml_element : etree._Element
         """
         tmpid = 0
 
@@ -3205,6 +3369,7 @@ class Container(list):
 
         if add_to_element is None:
             add_to_element = etree.Element("Entities")
+        noscript_in_supplied_xml = list(add_to_element.iter("noscript", "TransactionBenchmark"))
 
         for m in self.messages:
             add_to_element.append(m.to_xml())
@@ -3221,6 +3386,13 @@ class Container(list):
                 elem = e.to_xml()
             add_to_element.append(elem)
 
+        # remove noscript and benchmark elements added by this function
+        for elem in list(add_to_element.iter("noscript", "TransactionBenchmark")):
+            if elem not in noscript_in_supplied_xml:
+                parent = elem.getparent()
+                if parent is not None:
+                    parent.remove(elem)
+
         return add_to_element
 
     def get_errors(self):
@@ -3579,6 +3751,37 @@ class Container(list):
 
         return sync_dict
 
+    def filter_by_identity(self, entity: Optional[Entity] = None,
+                           entity_id: Union[None, str, int] = None,
+                           name: Optional[str] = None,
+                           conjunction: bool = False) -> list:
+        """
+        Return all Entities from this Container that match the selection criteria.
+
+        Please refer to the documentation of _filter_entity_list_by_identity for a detailed
+        description of behaviour.
+
+        Parameters
+        ----------
+        entity            : Entity
+                            Entity to match name and ID with. Cannot be set
+                            simultaneously with entity_id or name.
+        entity_id         : str, int
+                            Entity ID to match
+        name              : str
+                            Entity name to match
+        conjunction       : bool, defaults to False
+                            Set to return only entities that match both id and name
+                            if both are given.
+
+        Returns
+        -------
+        matches          : list
+                           List containing all matching Entities
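+
+        Example (record names are illustrative)::
+
+            cont = Container()
+            cont.extend([Record(name="A"), Record(name="B")])
+            matches = cont.filter_by_identity(name="A")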
+        """
+        return _filter_entity_list_by_identity(self, pid=entity_id, name=name, entity=entity,
+                                               conjunction=conjunction)
+
     @staticmethod
     def _find_dependencies_in_container(container: Container):
         """Find elements in a container that are a dependency of another element of the same.
@@ -4071,13 +4274,21 @@ class Container(list):
         warnings as errors.  This prevents the server from inserting this entity if any warning
         occurs.
 
-        @param strict=False: Flag for strict mode.
-        @param sync=True: synchronize this container with the response from the server. Otherwise,
-                          this method returns a new container with the inserted entities and leaves
-                          this container untouched.
-        @param unique=True: Flag for unique mode. If set to True, the server will check if the name
-                            of the entity is unique. If not, the server will return an error.
-        @param flags=None: Additional flags for the server.
+        Parameters
+        ----------
+        strict : bool, optional
+            Flag for strict mode. Default is False.
+        sync : bool, optional
+            synchronize this container with the response from the
+            server. Otherwise, this method returns a new container with the
+            inserted entities and leaves this container untouched. Default is
+            True.
+        unique : bool, optional
+            Flag for unique mode. If set to True, the server will check if the
+            name of the entity is unique. If not, the server will return an
+            error. Default is True.
+        flags : dict, optional
+            Additional flags for the server. Default is None.
 
         """
 
@@ -4449,7 +4660,7 @@ class ACL():
         return len(self._grants) + len(self._priority_grants) + \
             len(self._priority_denials) + len(self._denials) == 0
 
-    def clear(self):
+    def clear(self) -> None:
         self._grants: set[ACI] = set()
         self._denials: set[ACI] = set()
         self._priority_grants: set[ACI] = set()
@@ -4791,7 +5002,7 @@ class Query():
             self.etag = q.get("etag")
 
             for m in q:
-                if m.tag.lower() == 'warning' or m.tag.lower() == 'error':
+                if str(m.tag).lower() == 'warning' or str(m.tag).lower() == 'error':
                     self.messages.append(_parse_single_xml_element(m))
         else:
             self.q = q
@@ -4955,8 +5166,8 @@ def execute_query(
         Otherwise, paging is disabled, as well as for count queries and
         when unique is True. Defaults to None.
 
-    Raises:
-    -------
+    Raises
+    ------
     PagingConsistencyError
         If the database state changed between paged requests.
 
@@ -4998,13 +5209,13 @@ class DropOffBox(list):
         xml = etree.fromstring(body)
 
         for child in xml:
-            if child.tag.lower() == "stats":
+            if str(child.tag).lower() == "stats":
                 infoelem = child
 
                 break
 
         for child in infoelem:
-            if child.tag.lower() == "dropoffbox":
+            if str(child.tag).lower() == "dropoffbox":
                 dropoffboxelem = child
 
                 break
@@ -5018,6 +5229,17 @@ class DropOffBox(list):
 
 
 class UserInfo():
+    """User information from a server response.
+
+    Attributes
+    ----------
+    name : str
+        Username
+    realm : str
+        Realm in which this user lives, e.g., CaosDB or LDAP.
+    roles : list[str]
+        List of roles assigned to this user.
+    """
 
     def __init__(self, xml: etree._Element):
         self.roles = [role.text for role in xml.findall("Roles/Role")]
@@ -5026,14 +5248,30 @@ class UserInfo():
 
 
 class Info():
+    """Info about the LinkAhead instance that you are connected to. It has a
+    simple string representation in the form of "Connected to a LinkAhead with N
+    Records".
 
-    def __init__(self):
+    Attributes
+    ----------
+    messages : Messages
+        Collection of messages that the server's ``Info`` response contained.
+    user_info : UserInfo
+        Information about the user that is connected to the server, such as
+        name, realm or roles.
+    time_zone : TimeZone
+        The timezone information returned by the server.
+
+    """
+
+    def __init__(self) -> None:
         self.messages = Messages()
         self.user_info: Optional[UserInfo] = None
         self.time_zone: Optional[TimeZone] = None
         self.sync()
 
     def sync(self):
+        """Retrieve server information from the server's ``Info`` response."""
         c = get_connection()
         try:
             http_response = c.retrieve(["Info"])
@@ -5152,36 +5390,36 @@ def _parse_single_xml_element(elem: etree._Element):
         "entity": Entity,
     }
 
-    if elem.tag.lower() in classmap:
-        klass = classmap.get(elem.tag.lower())
+    if str(elem.tag).lower() in classmap:
+        klass = classmap.get(str(elem.tag).lower())
         if klass is None:
-            raise LinkAheadException("No class for tag '{}' found.".format(elem.tag))
+            raise LinkAheadException("No class for tag '{}' found.".format(str(elem.tag)))
         entity = klass()
         Entity._from_xml(entity, elem)
 
         return entity
-    elif elem.tag.lower() == "version":
+    elif str(elem.tag).lower() == "version":
         return Version.from_xml(elem)
-    elif elem.tag.lower() == "state":
+    elif str(elem.tag).lower() == "state":
         return State.from_xml(elem)
-    elif elem.tag.lower() == "emptystring":
+    elif str(elem.tag).lower() == "emptystring":
         return ""
-    elif elem.tag.lower() == "value":
-        if len(elem) == 1 and elem[0].tag.lower() == "emptystring":
+    elif str(elem.tag).lower() == "value":
+        if len(elem) == 1 and str(elem[0].tag).lower() == "emptystring":
             return ""
-        elif len(elem) == 1 and elem[0].tag.lower() in classmap:
+        elif len(elem) == 1 and str(elem[0].tag).lower() in classmap:
             return _parse_single_xml_element(elem[0])
         elif elem.text is None or elem.text.strip() == "":
             return None
 
         return str(elem.text.strip())
-    elif elem.tag.lower() == "querytemplate":
+    elif str(elem.tag).lower() == "querytemplate":
         return QueryTemplate._from_xml(elem)
-    elif elem.tag.lower() == 'query':
+    elif str(elem.tag).lower() == 'query':
         return Query(elem)
-    elif elem.tag.lower() == 'history':
+    elif str(elem.tag).lower() == 'history':
         return Message(type='History', description=elem.get("transaction"))
-    elif elem.tag.lower() == 'stats':
+    elif str(elem.tag).lower() == 'stats':
         counts = elem.find("counts")
         if counts is None:
             raise LinkAheadException("'stats' element without a 'count' found.")
@@ -5201,7 +5439,7 @@ def _parse_single_xml_element(elem: etree._Element):
     else:
         code = elem.get("code")
         return Message(
-            type=elem.tag,
+            type=str(elem.tag),
             code=int(code) if code is not None else None,
             description=elem.get("description"),
             body=elem.text,
@@ -5349,3 +5587,106 @@ def delete(ids: Union[list[int], range], raise_exception_on_error: bool = True):
         c.append(Entity(id=ids))
 
     return c.delete(raise_exception_on_error=raise_exception_on_error)
+
+
+def _filter_entity_list_by_identity(listobject: list[Entity],
+                                    entity: Optional[Entity] = None,
+                                    pid: Union[None, str, int] = None,
+                                    name: Optional[str] = None,
+                                    conjunction: bool = False) -> list:
+    """
+    Returns a subset of entities from the list based on whether their id and
+    name match the selection criteria.
+
+    If both pid and name are given, entities from the list are first matched
+    based on id. If they do not have an id, they are matched based on name.
+    If only one parameter is given, only this parameter is considered.
+
+    If an Entity is given, neither name nor ID may be set. In this case, pid
+    and name are determined by the attributes of given entity.
+
+    This results in the following selection criteria:
+    If an entity in the list
+    - has both name and id, it is returned if the id matches the given not-None
+      value for pid. If no pid was given, it is returned if the name matches.
+    - has an id, but no name, it will be returned only if the id matches the
+      given not-None value for pid
+    - has no id, but a name, it will be returned if the name matches the given
+      not-None value for name
+    - has neither id nor name, it will never be returned
+
+    As IDs can be strings, integer IDs are cast to string for the comparison.
+
+    Parameters
+    ----------
+    listobject        : Iterable(Entity)
+                        List to be filtered
+    entity            : Entity
+                        Entity to match name and ID for. Cannot be set
+                        simultaneously with ID or name.
+    pid               : str, int
+                        Entity ID to match
+    name              : str
+                        Entity name to match
+    conjunction       : bool, defaults to False
+                        Set to true to return only entities that match both id
+                        and name if both are given.
+
+    Returns
+    -------
+    matches          : list
+                       A List containing all matching Entities
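+
+    Example with hypothetical entities::
+
+        e1 = Entity(name="A", id=101)
+        e2 = Entity(name="B")
+        _filter_entity_list_by_identity([e1, e2], pid=101)   # -> [e1]
+        _filter_entity_list_by_identity([e1, e2], name="b")  # -> [e2], case-insensitive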
+    """
+    # Check correct input params and setup
+    if entity is not None:
+        if pid is not None or name is not None:
+            raise ValueError("If an entity is given, pid and name must not be set.")
+        pid = entity.id
+        name = entity.name
+    if pid is None and name is None:
+        if entity is None:
+            raise ValueError("One of entity, pid or name must be set.")
+        else:
+            raise ValueError("A given entity must have at least one of name and id.")
+    if pid is None or name is None:
+        conjunction = False
+
+    # Iterate through list and match based on given criteria
+    matches = []
+    for candidate in listobject:
+        name_match, pid_match = False, False
+
+        # Check whether name/pid match
+        # Comparison is only possible if both are not None
+        pid_none = pid is None or candidate.id is None
+        # Cast to string in case one is f.e. "12" and the other is 12
+        if not pid_none and str(candidate.id) == str(pid):
+            pid_match = True
+        name_none = name is None or candidate.name is None
+        if not name_none and str(candidate.name).lower() == str(name).lower():
+            name_match = True
+
+        # If the criteria are satisfied, append the match.
+        if pid_match and name_match:
+            matches.append(candidate)
+        elif not conjunction:
+            if pid_match:
+                matches.append(candidate)
+            if pid_none and name_match:
+                matches.append(candidate)
+    return matches
+
+
+def value_matches_versionid(value: Union[int, str]):
+    """Returns True if the value matches the pattern <id>@<version>"""
+    if isinstance(value, int):
+        return False
+    if not isinstance(value, str):
+        raise ValueError(f"A reference value needs to be int or str. It was {type(value)}. "
+                         "Did you call value_matches_versionid on a non-reference value?")
+    return "@" in value
+
+
+def get_id_from_versionid(versionid: str):
+    """Returns the ID part of the versionid with the pattern <id>@<version>"""
+    return versionid.split("@")[0]
diff --git a/src/linkahead/common/state.py b/src/linkahead/common/state.py
index e352f82d9820620d1692cb6337eb218210e799e6..b708ca13cb0a648aa2ca00507f39a531e4f55d14 100644
--- a/src/linkahead/common/state.py
+++ b/src/linkahead/common/state.py
@@ -20,11 +20,11 @@
 # ** end header
 
 from __future__ import annotations  # Can be removed with 3.10.
-import copy
-from lxml import etree
 
+import copy
 from typing import TYPE_CHECKING
-import sys
+
+from lxml import etree
 
 if TYPE_CHECKING:
     from typing import Optional
@@ -87,7 +87,8 @@ class Transition:
         return self._to_state
 
     def __repr__(self):
-        return f'Transition(name="{self.name}", from_state="{self.from_state}", to_state="{self.to_state}", description="{self.description}")'
+        return (f'Transition(name="{self.name}", from_state="{self.from_state}", '
+                f'to_state="{self.to_state}", description="{self.description}")')
 
     def __eq__(self, other):
         return (
@@ -103,9 +104,9 @@ class Transition:
     @staticmethod
     def from_xml(xml: etree._Element) -> "Transition":
         to_state = [to.get("name")
-                    for to in xml if to.tag.lower() == "tostate"]
+                    for to in xml if str(to.tag).lower() == "tostate"]
         from_state = [
-            from_.get("name") for from_ in xml if from_.tag.lower() == "fromstate"
+            from_.get("name") for from_ in xml if str(from_.tag).lower() == "fromstate"
         ]
         return Transition(
             name=xml.get("name"),
@@ -199,7 +200,7 @@ class State:
         result._id = xml.get("id")
         result._description = xml.get("description")
         transitions = [
-            Transition.from_xml(t) for t in xml if t.tag.lower() == "transition"
+            Transition.from_xml(t) for t in xml if str(t.tag).lower() == "transition"
         ]
         if transitions:
             result._transitions = set(transitions)
diff --git a/src/linkahead/common/versioning.py b/src/linkahead/common/versioning.py
index 2e292e6bb031725fbd6da618c4b888c05072c46b..1c2999df8174e239a470cfc637533c3c8c302c33 100644
--- a/src/linkahead/common/versioning.py
+++ b/src/linkahead/common/versioning.py
@@ -101,11 +101,14 @@ class Version():
     # pylint: disable=redefined-builtin
     def __init__(self, id: Optional[str] = None, date: Optional[str] = None,
                  username: Optional[str] = None, realm: Optional[str] = None,
-                 predecessors: Optional[List[Version]] = None, successors: Optional[List[Version]] = None,
+                 predecessors: Optional[List[Version]] = None,
+                 successors: Optional[List[Version]] = None,
                  is_head: Union[bool, str, None] = False,
                  is_complete_history: Union[bool, str, None] = False):
-        """Typically the `predecessors` or `successors` should not "link back" to an existing Version
-object."""
+        """Typically the `predecessors` or `successors` should not "link back" to an existing
+        Version object.
+
+        """
         self.id = id
         self.date = date
         self.username = username
@@ -205,8 +208,8 @@ object."""
         version : Version
             a new version instance
         """
-        predecessors = [Version.from_xml(p) for p in xml if p.tag.lower() == "predecessor"]
-        successors = [Version.from_xml(s) for s in xml if s.tag.lower() == "successor"]
+        predecessors = [Version.from_xml(p) for p in xml if str(p.tag).lower() == "predecessor"]
+        successors = [Version.from_xml(s) for s in xml if str(s.tag).lower() == "successor"]
         return Version(id=xml.get("id"), date=xml.get("date"),
                        is_head=xml.get("head"),
                        is_complete_history=xml.get("completeHistory"),
diff --git a/src/linkahead/configuration.py b/src/linkahead/configuration.py
index b020467c8c53e26d464a6a2fb473cc912b0e0612..5081c28af253d3da31926ab1c9449309cc171c4f 100644
--- a/src/linkahead/configuration.py
+++ b/src/linkahead/configuration.py
@@ -30,6 +30,15 @@ import yaml
 try:
     optional_jsonschema_validate: Optional[Callable] = None
     from jsonschema import validate as optional_jsonschema_validate
+
+    # Adapted from https://github.com/python-jsonschema/jsonschema/issues/148
+    # Defines Validator to allow parsing of all iterables as array in jsonschema
+    # CustomValidator can be removed if/once jsonschema allows tuples for arrays
+    from collections.abc import Iterable
+    from jsonschema import validators
+    default = validators.validator_for(True)   # Returns latest supported draft
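+    # The redefine callback receives (checker, instance); any Iterable counts as "array".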
+    t_c = (default.TYPE_CHECKER.redefine('array', lambda x, y: isinstance(y, Iterable)))
+    CustomValidator = validators.extend(default, type_checker=t_c)
 except ImportError:
     pass
 
@@ -72,14 +81,40 @@ def get_config() -> ConfigParser:
     return _pycaosdbconf
 
 
-def config_to_yaml(config: ConfigParser) -> dict[str, dict[str, Union[int, str, bool]]]:
-    valobj: dict[str, dict[str, Union[int, str, bool]]] = {}
+def config_to_yaml(
+        config: ConfigParser) -> dict[str, dict[str, Union[int, str, bool, tuple, None]]]:
+    """
+    Generates and returns a dict with all config options and their values
+    defined in the config.
+    The values of the options 'debug', 'timeout', and 'ssl_insecure' are
+    parsed; all other values are stored as strings.
+
+    Parameters
+    ----------
+    config : ConfigParser
+        The config to be converted to a dict
+
+    Returns
+    -------
+    valobj : dict
+        A dict with config options and their values as key-value pairs
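+
+    Examples
+    --------
+    An illustrative round trip (values are made up):
+
+    >>> from configparser import ConfigParser
+    >>> conf = ConfigParser()
+    >>> conf.read_dict({"Connection": {"timeout": "(None, 20)"}})
+    >>> config_to_yaml(conf)["Connection"]["timeout"]
+    (None, 20)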
+    """
+    valobj: dict[str, dict[str, Union[int, str, bool, tuple, None]]] = {}
     for s in config.sections():
         valobj[s] = {}
         for key, value in config[s].items():
             # TODO: Can the type be inferred from the config object?
-            if key in ["timeout", "debug"]:
+            if key in ["debug"]:
                 valobj[s][key] = int(value)
+            elif key in ["timeout"]:
+                value = "".join(value.split())          # Remove whitespace
+                if str(value).lower() in ["none", "null"]:
+                    valobj[s][key] = None
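+                # Tuple notation, e.g. "(5, 300)" or "(None, 20)", as (connect, read) timeouts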
+                elif value.startswith('(') and value.endswith(')'):
+                    content = [None if part.lower() in ["none", "null"] else int(part)
+                               for part in value[1:-1].split(',')]
+                    valobj[s][key] = tuple(content)
+                else:
+                    valobj[s][key] = int(value)
             elif key in ["ssl_insecure"]:
                 valobj[s][key] = bool(value)
             else:
@@ -88,11 +123,12 @@ def config_to_yaml(config: ConfigParser) -> dict[str, dict[str, Union[int, str,
     return valobj
 
 
-def validate_yaml_schema(valobj: dict[str, dict[str, Union[int, str, bool]]]):
+def validate_yaml_schema(valobj: dict[str, dict[str, Union[int, str, bool, tuple, None]]]):
     if optional_jsonschema_validate:
         with open(os.path.join(os.path.dirname(__file__), "schema-pycaosdb-ini.yml")) as f:
             schema = yaml.load(f, Loader=yaml.SafeLoader)
-        optional_jsonschema_validate(instance=valobj, schema=schema["schema-pycaosdb-ini"])
+        optional_jsonschema_validate(instance=valobj, schema=schema["schema-pycaosdb-ini"],
+                                     cls=CustomValidator)
     else:
         warnings.warn("""
             Warning: The validation could not be performed because `jsonschema` is not installed.
@@ -102,7 +138,7 @@ def validate_yaml_schema(valobj: dict[str, dict[str, Union[int, str, bool]]]):
 def _read_config_files() -> list[str]:
     """Read config files from different paths.
 
-    Read the config from either ``$PYCAOSDBINI`` or home directory (``~/.pylinkahead.ini``), and
+    Read the config from either ``$PYLINKAHEADINI`` or home directory (``~/.pylinkahead.ini``), and
     additionally adds config from a config file in the current working directory
     (``pylinkahead.ini``).
     If deprecated names are used (starting with 'pycaosdb'), those used in addition but the files
@@ -131,15 +167,18 @@ def _read_config_files() -> list[str]:
         warnings.warn("\n\nYou have a config file with the old naming scheme (pycaosdb.ini). "
                       f"Please use the new version and rename\n"
                       f"    {ini_cwd_caosdb}\nto\n    {ini_cwd}", DeprecationWarning)
+    if "PYCAOSDBINI" in environ:
+        warnings.warn("\n\nYou have an environment variable PYCAOSDBINI. "
+                      "Please rename it to PYLINKAHEADINI.")
     # End: LinkAhead rename block ##################################################
 
-    if "PYCAOSDBINI" in environ:
-        if not isfile(expanduser(environ["PYCAOSDBINI"])):
+    if "PYLINKAHEADINI" in environ:
+        if not isfile(expanduser(environ["PYLINKAHEADINI"])):
             raise RuntimeError(
-                f"No configuration file found at\n{expanduser(environ['PYCAOSDBINI'])}"
-                "\nwhich was given via the environment variable PYCAOSDBINI"
+                f"No configuration file found at\n{expanduser(environ['PYLINKAHEADINI'])}"
+                "\nwhich was given via the environment variable PYLINKAHEADINI"
             )
-        return_var.extend(configure(expanduser(environ["PYCAOSDBINI"])))
+        return_var.extend(configure(expanduser(environ["PYLINKAHEADINI"])))
     else:
         if isfile(ini_user_caosdb):
             return_var.extend(configure(ini_user_caosdb))
diff --git a/src/linkahead/connection/authentication/interface.py b/src/linkahead/connection/authentication/interface.py
index b48e27c08312bf1358d32a9a1203627a9d0007c2..8288880583dc58fc82ab03d371861f067406b3d3 100644
--- a/src/linkahead/connection/authentication/interface.py
+++ b/src/linkahead/connection/authentication/interface.py
@@ -125,8 +125,9 @@ class AbstractAuthenticator(ABC):
         Returns
         -------
         """
-        self.auth_token = parse_auth_token(
-            response.getheader("Set-Cookie"))
+        new_token = parse_auth_token(response.getheader("Set-Cookie"))
+        if new_token is not None:
+            self.auth_token = new_token
 
     def on_request(self, method: str, path: str, headers: QueryDict, **kwargs):
         # pylint: disable=unused-argument
@@ -190,7 +191,7 @@ class CredentialsAuthenticator(AbstractAuthenticator):
     def _logout(self):
         self.logger.debug("[LOGOUT]")
         if self.auth_token is not None:
-            self._connection.request(method="DELETE", path="logout")
+            self._connection.request(method="GET", path="logout")
         self.auth_token = None
 
     def _login(self):
diff --git a/src/linkahead/connection/connection.py b/src/linkahead/connection/connection.py
index 294d9457d064f03bbe06a3347b2d2064dcf12b8c..fe99b421ee9d5bc3bc158af6b7f4882232db4d97 100644
--- a/src/linkahead/connection/connection.py
+++ b/src/linkahead/connection/connection.py
@@ -39,7 +39,7 @@ from requests.adapters import HTTPAdapter
 from requests.exceptions import ConnectionError as HTTPConnectionError
 from urllib3.poolmanager import PoolManager
 
-from ..configuration import get_config
+from ..configuration import get_config, config_to_yaml
 from ..exceptions import (ConfigurationError, HTTPClientError,
                           HTTPForbiddenError, HTTPResourceNotFoundError,
                           HTTPServerError, HTTPURITooLongError,
@@ -47,7 +47,7 @@ from ..exceptions import (ConfigurationError, HTTPClientError,
                           LoginFailedError)
 
 try:
-    from ..version import version
+    from ..version import version               # pylint: disable=import-error
 except ModuleNotFoundError:
     version = "uninstalled"
 
@@ -56,11 +56,12 @@ from .interface import CaosDBHTTPResponse, CaosDBServerConnection
 from .utils import make_uri_path, urlencode
 
 from typing import TYPE_CHECKING
+from .authentication.interface import CredentialsAuthenticator
 if TYPE_CHECKING:
     from typing import Optional, Any, Iterator, Union
     from requests.models import Response
-    from ssl import _SSLMethod
-    from .authentication.interface import AbstractAuthenticator, CredentialsAuthenticator
+    from ssl import _SSLMethod              # pylint: disable=no-name-in-module
+    from .authentication.interface import AbstractAuthenticator
 
 
 _LOGGER = logging.getLogger(__name__)
@@ -83,8 +84,10 @@ class _WrappedHTTPResponse(CaosDBHTTPResponse):
         return self.response.status_code
 
     def read(self, size: Optional[int] = None):
+        # FIXME This function behaves unexpectedly if `size` is larger than in the first run.
+
         if self._stream_consumed is True:
-            raise RuntimeError("Stream is consumed")
+            raise BufferError("Stream is consumed")
 
         if self._buffer is None:
             # the buffer has been drained in the previous call.
@@ -97,14 +100,14 @@ class _WrappedHTTPResponse(CaosDBHTTPResponse):
             return self.response.content
 
         if size is None or size == 0:
-            raise RuntimeError(
-                "size parameter should not be None if the stream is not consumed yet")
+            raise BufferError(
+                "The `size` parameter cannot be None or zero once reading has started with a "
+                "non-zero value.")
 
         if len(self._buffer) >= size:
             # still enough bytes in the buffer
-            # FIXME: `chunk`` is used before definition
-            result = chunk[:size]
-            self._buffer = chunk[size:]
+            result = self._buffer[:size]
+            self._buffer = self._buffer[size:]
             return result
 
         if self._generator is None:
@@ -116,16 +119,16 @@ class _WrappedHTTPResponse(CaosDBHTTPResponse):
         try:
             # read new data into the buffer
             chunk = self._buffer + next(self._generator)
-            result = chunk[:size]
+            result = chunk[:size]  # FIXME what if `size` is larger than at `iter_content(size)`?
             if len(result) == 0:
                 self._stream_consumed = True
             self._buffer = chunk[size:]
             return result
         except StopIteration:
             # drain buffer
-            result = self._buffer
+            last_result = self._buffer
             self._buffer = None
-            return result
+            return last_result
 
     def getheader(self, name: str, default=None):
         return self.response.headers[name] if name in self.response.headers else default
@@ -218,7 +221,7 @@ class _DefaultCaosDBServerConnection(CaosDBServerConnection):
                 "Connection failed. Network or server down? " + str(conn_err)
             )
 
-    def configure(self, **config):
+    def configure(self, **config) -> None:
         """configure.
 
         Configure the http connection.
@@ -420,8 +423,10 @@ def configure_connection(**kwargs):
         - "keyring"  Uses the `keyring` library.
         - "auth_token" Uses only a given auth_token.
 
-    timeout : int
+    timeout : int, tuple, or None
         A connection timeout in seconds. (Default: 210)
+        If a tuple is given, its two values are used as the connect and read
+        timeouts, respectively; a timeout of None disables the timeout.
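+        For example, timeout=(3, 30) (illustrative values) waits up to 3
+        seconds for the connection and up to 30 seconds for the response.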
 
     ssl_insecure : bool
         Whether SSL certificate warnings should be ignored. Only use this for
@@ -463,21 +468,29 @@ def configure_connection(**kwargs):
     global_conf = {}
     conf = get_config()
     # Convert config to dict, with preserving types
-    int_opts = ["timeout"]
+    int_opts = []
     bool_opts = ["ssl_insecure"]
+    other_opts = ["timeout"]
 
     if conf.has_section("Connection"):
         global_conf = dict(conf.items("Connection"))
-        # Integer options
 
+        # Integer options
         for opt in int_opts:
             if opt in global_conf:
                 global_conf[opt] = conf.getint("Connection", opt)
-        # Boolean options
 
+        # Boolean options
         for opt in bool_opts:
             if opt in global_conf:
                 global_conf[opt] = conf.getboolean("Connection", opt)
+
+        # Other options, defer parsing to configuration.config_to_yaml:
+        connection_config = config_to_yaml(conf)["Connection"]
+        for opt in other_opts:
+            if opt in global_conf:
+                global_conf[opt] = connection_config[opt]
+
     local_conf = _make_conf(_DEFAULT_CONF, global_conf, kwargs)
 
     connection = _Connection.get_instance()
@@ -551,9 +564,9 @@ class _Connection(object):  # pylint: disable=useless-object-inheritance
 
     __instance = None
 
-    def __init__(self):
+    def __init__(self) -> None:
         self._delegate_connection: Optional[CaosDBServerConnection] = None
-        self._authenticator: Optional[CredentialsAuthenticator] = None
+        self._authenticator: Optional[AbstractAuthenticator] = None
         self.is_configured = False
 
     @classmethod
@@ -563,7 +576,7 @@ class _Connection(object):  # pylint: disable=useless-object-inheritance
 
         return cls.__instance
 
-    def configure(self, **config):
+    def configure(self, **config) -> _Connection:
         self.is_configured = True
 
         if "implementation" not in config:
@@ -571,8 +584,7 @@ class _Connection(object):  # pylint: disable=useless-object-inheritance
                 "Missing CaosDBServerConnection implementation. You did not "
                 "specify an `implementation` for the connection.")
         try:
-            self._delegate_connection: CaosDBServerConnection = config["implementation"](
-            )
+            self._delegate_connection = config["implementation"]()
 
             if not isinstance(self._delegate_connection,
                               CaosDBServerConnection):
@@ -757,9 +769,12 @@ class _Connection(object):  # pylint: disable=useless-object-inheritance
 
         Shortcut for: get_connection()._authenticator._credentials_provider.username
         """
+        warnings.warn("Deprecated. Please use ``la.Info().user_info.name`` instead.",
+                      DeprecationWarning)
         if self._authenticator is None:
             raise ValueError(
                 "No authenticator set. Please call configure_connection() first.")
+        assert isinstance(self._authenticator, CredentialsAuthenticator)
         if self._authenticator._credentials_provider is None:
             raise ValueError(
                 "No credentials provider set. Please call configure_connection() first.")
diff --git a/src/linkahead/connection/encode.py b/src/linkahead/connection/encode.py
index a76197803c9652e2d0c4e32819ee3e3f97758bfc..0cbb0b69f0a7b50244eb54c8dea7ef43ae713894 100644
--- a/src/linkahead/connection/encode.py
+++ b/src/linkahead/connection/encode.py
@@ -384,7 +384,7 @@ class MultipartYielder(object):
 
     # since python 3
     def __next__(self):
-        return self.next()
+        return self.next()                     # pylint: disable=not-callable
 
     def next(self):
         """generator function to yield multipart/form-data representation of
diff --git a/src/linkahead/connection/mockup.py b/src/linkahead/connection/mockup.py
index 9b69971c0409708f221c402f540fac85ff9c527e..d3bc13bb474a70d48446e8532607c3e11931ff05 100644
--- a/src/linkahead/connection/mockup.py
+++ b/src/linkahead/connection/mockup.py
@@ -75,7 +75,7 @@ class MockUpServerConnection(CaosDBServerConnection):
     just returns predefined responses which mimic the LinkAhead server."""
 
     def __init__(self):
-        self.resources = [self._login]
+        self.resources = [self._login, self._logout]
 
     def _login(self, method, path, headers, body):
         if method == "POST" and path == "login":
@@ -84,6 +84,12 @@ class MockUpServerConnection(CaosDBServerConnection):
                                            "mockup-auth-token"},
                                   body="")
 
+    def _logout(self, method, path, headers, body):
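+        # Accept both DELETE (the old client behaviour) and GET (the new one) on "logout".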
+        if method in ["DELETE", "GET"] and path == "logout":
+            return MockUpResponse(200,
+                                  headers={},
+                                  body="")
+
     def configure(self, **kwargs):
         """This configure method does nothing."""
 
diff --git a/src/linkahead/exceptions.py b/src/linkahead/exceptions.py
index 609d3654ac670a993185ba1faa33db921c44409c..7d4dc0850b811c0d696cc66252aa62541c6d3029 100644
--- a/src/linkahead/exceptions.py
+++ b/src/linkahead/exceptions.py
@@ -94,12 +94,26 @@ class HTTPServerError(LinkAheadException):
     """HTTPServerError represents 5xx HTTP server errors."""
 
     def __init__(self, body):
-        xml = etree.fromstring(body)
-        error = xml.xpath('/Response/Error')[0]
-        msg = error.get("description")
-
-        if error.text is not None:
-            msg = msg + "\n\n" + error.text
+        try:
+            # This only works if the server sends a valid XML
+            # response. Then it can be parsed for more information.
+            xml = etree.fromstring(body)
+            if xml.xpath('/Response/Error'):
+                error = xml.xpath('/Response/Error')[0]
+                msg = error.get("description") if error.get("description") is not None else ""
+
+                if error.text is not None:
+                    if msg:
+                        msg = msg + "\n\n" + error.text
+                    else:
+                        msg = error.text
+            else:
+                # Valid XML, but no error information
+                msg = body
+        except etree.XMLSyntaxError:
+            # Handling of incomplete responses, e.g., due to timeouts,
+            # c.f. https://gitlab.com/linkahead/linkahead-pylib/-/issues/87.
+            msg = body
         LinkAheadException.__init__(self, msg)
 
 
diff --git a/src/linkahead/high_level_api.py b/src/linkahead/high_level_api.py
index 18d219c732672d16d0ab43e562cfe73d682614fe..9aa59fb9187ff47e71c412568af50e1031c42fb7 100644
--- a/src/linkahead/high_level_api.py
+++ b/src/linkahead/high_level_api.py
@@ -26,11 +26,12 @@
 # type: ignore
 """
 A high level API for accessing LinkAhead entities from within python.
+This module is experimental, and may be changed or removed in the future.
 
 This is refactored from apiutils.
 """
 
-import warnings
+import logging
 from dataclasses import dataclass, fields
 from datetime import datetime
 from typing import Any, Dict, List, Optional, Union
@@ -44,7 +45,10 @@ from .common.datatype import (BOOLEAN, DATETIME, DOUBLE, FILE, INTEGER,
                               REFERENCE, TEXT, get_list_datatype,
                               is_list_datatype, is_reference)
 
-warnings.warn("""EXPERIMENTAL! The high_level_api module is experimental and may be changed or
+logger = logging.getLogger(__name__)
+
+
+logger.warning("""EXPERIMENTAL! The high_level_api module is experimental and may be changed or
 removed in the future. Its purpose is to give an impression on how the Python client user interface
 might be changed.""")
 
diff --git a/src/linkahead/schema-pycaosdb-ini.yml b/src/linkahead/schema-pycaosdb-ini.yml
index 89ce98570738fdd29dba81de25a2c022c1581467..ae46b905c62d2ab168229d92ff138937279c7aed 100644
--- a/src/linkahead/schema-pycaosdb-ini.yml
+++ b/src/linkahead/schema-pycaosdb-ini.yml
@@ -67,7 +67,13 @@ schema-pycaosdb-ini:
           description: This option is used internally and for testing. Do not override.
           examples: [_DefaultCaosDBServerConnection]
         timeout:
-          type: integer
+          oneOf:
+            - type: [integer, "null"]
+            - type: array
+              items:
+                type: [integer, "null"]
+              minItems: 2
+              maxItems: 2
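+          # e.g. 210, null, or a two-element [connect, read] list such as [5, 300]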
       allOf:
         - if:
             properties:
diff --git a/src/linkahead/utils/create_revision.py b/src/linkahead/utils/create_revision.py
index 5f6ecc8148859d0ee0908412ff80d20d465cdb25..cde4bae5b0d919977d220b2c35896dcb20e933e7 100644
--- a/src/linkahead/utils/create_revision.py
+++ b/src/linkahead/utils/create_revision.py
@@ -34,15 +34,15 @@ def bend_references(from_id, to_id, except_for=None):
     and those references are changed to point to to_id.
     entities having an id listed in except_for are excluded.
 
-Parameters
-----------
+    Parameters
+    ----------
 
-from_id : int
-  the old object to which references where pointing
-to_id : int
-  the new object to which references will be pointing
-except_for : list of int
-  entities with id of this list will not be changed
+    from_id : int
+      the old object to which references were pointing
+    to_id : int
+      the new object to which references will be pointing
+    except_for : list of int
+      entities whose id is in this list will not be changed
     """
     if except_for is None:
         except_for = [to_id]
@@ -73,16 +73,16 @@ def create_revision(old_id, prop, value):
     This function changes the record with id old_id. The value of the
     propertye prop is changed to value.
 
-Parameters
-----------
+    Parameters
+    ----------
 
-old_id : int
-    id of the record to be changed
-prop : string
-    name of the property to be changed
-value : type of corresponding property
-    the new value of the corresponding property
-"""
+    old_id : int
+        id of the record to be changed
+    prop : string
+        name of the property to be changed
+    value : type of corresponding property
+        the new value of the corresponding property
+    """
     record = db.execute_query("FIND {}".format(old_id))[0]
     new_rec = record.copy()
     new_rec.get_property(prop).value = value
diff --git a/src/linkahead/utils/get_entity.py b/src/linkahead/utils/get_entity.py
index f84dc107e275390e53c6127834f53e5e5c6521cd..dd91cdc27b3f6adb52ddef36a59d1a0965fb662e 100644
--- a/src/linkahead/utils/get_entity.py
+++ b/src/linkahead/utils/get_entity.py
@@ -30,7 +30,13 @@ from .escape import escape_squoted_text
 def get_entity_by_name(name: str, role: Optional[str] = None) -> Entity:
     """Return the result of a unique query that uses the name to find the correct entity.
 
-    Submits the query "FIND ENTITY WITH name='{name}'".
+    Submits the query "FIND {role} WITH name='{name}'".
+
+    Parameters
+    ----------
+
+    role: str, optional
+      The role for the query, defaults to ``ENTITY``.
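+
+    For example (illustrative name):
+
+    >>> rt = get_entity_by_name("Experiment", role="RECORDTYPE")  # doctest: +SKIP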
     """
     name = escape_squoted_text(name)
     if role is None:
@@ -42,7 +48,13 @@ def get_entity_by_name(name: str, role: Optional[str] = None) -> Entity:
 def get_entity_by_id(eid: Union[str, int], role: Optional[str] = None) -> Entity:
     """Return the result of a unique query that uses the id to find the correct entity.
 
-    Submits the query "FIND ENTITY WITH id='{eid}'".
+    Submits the query "FIND {role} WITH id='{eid}'".
+
+    Parameters
+    ----------
+
+    role: str, optional
+      The role for the query, defaults to ``ENTITY``.
     """
     if role is None:
         role = "ENTITY"
@@ -53,7 +65,13 @@ def get_entity_by_id(eid: Union[str, int], role: Optional[str] = None) -> Entity
 def get_entity_by_path(path: str) -> Entity:
     """Return the result of a unique query that uses the path to find the correct file.
 
-    Submits the query "FIND FILE WHICH IS STORED AT '{path}'".
+    Submits the query "FIND {role} WHICH IS STORED AT '{path}'".
+
+    Parameters
+    ----------
+
+    role: str, optional
+      The role for the query, defaults to ``ENTITY``.
     """
     # type hint can be ignored, it's a unique query
     return execute_query(f"FIND FILE WHICH IS STORED AT '{path}'", unique=True)  # type: ignore
diff --git a/src/linkahead/utils/git_utils.py b/src/linkahead/utils/git_utils.py
index 7a58272a3bef1930f75a1e08364349388e2bb89f..4824d619bfc77925add0c383f72360a644dd7833 100644
--- a/src/linkahead/utils/git_utils.py
+++ b/src/linkahead/utils/git_utils.py
@@ -36,9 +36,9 @@ logger = logging.getLogger(__name__)
 
 def get_origin_url_in(folder: str):
     """return the Fetch URL of the git repository in the given folder."""
-    with tempfile.NamedTemporaryFile(delete=False, mode="w") as t:
-        call(["git", "remote", "show", "origin"], stdout=t, cwd=folder)
-    with open(t.name, "r") as t:
+    with tempfile.NamedTemporaryFile(delete=False, mode="w", encoding="utf8") as tempf:
+        call(["git", "remote", "show", "origin"], stdout=tempf, cwd=folder)
+    with open(tempf.name, "r", encoding="utf8") as t:
         urlString = "Fetch URL:"
 
         for line in t.readlines():
@@ -63,9 +63,9 @@ def get_branch_in(folder: str):
     The command "git branch" is called in the given folder and the
     output is returned
     """
-    with tempfile.NamedTemporaryFile(delete=False, mode="w") as t:
-        call(["git", "rev-parse", "--abbrev-ref", "HEAD"], stdout=t, cwd=folder)
-    with open(t.name, "r") as t:
+    with tempfile.NamedTemporaryFile(delete=False, mode="w", encoding="utf8") as tempf:
+        call(["git", "rev-parse", "--abbrev-ref", "HEAD"], stdout=tempf, cwd=folder)
+    with open(tempf.name, "r", encoding="utf8") as t:
         return t.readline().strip()
 
 
@@ -76,7 +76,7 @@ def get_commit_in(folder: str):
     and the output is returned
     """
 
-    with tempfile.NamedTemporaryFile(delete=False, mode="w") as t:
-        call(["git", "log", "-1", "--format=%h"], stdout=t, cwd=folder)
-    with open(t.name, "r") as t:
+    with tempfile.NamedTemporaryFile(delete=False, mode="w", encoding="utf8") as tempf:
+        call(["git", "log", "-1", "--format=%h"], stdout=tempf, cwd=folder)
+    with open(tempf.name, "r", encoding="utf8") as t:
         return t.readline().strip()
diff --git a/src/linkahead/utils/linkahead_admin.py b/src/linkahead/utils/linkahead_admin.py
index f7e3b8b63f18e37e6210f2aa03f34ce5b0f688d4..ca5f3c01e0bbe95fe712761ec7f443ec88d406fd 100755
--- a/src/linkahead/utils/linkahead_admin.py
+++ b/src/linkahead/utils/linkahead_admin.py
@@ -33,7 +33,7 @@ from argparse import ArgumentParser, RawDescriptionHelpFormatter
 
 import linkahead as db
 from linkahead import administration as admin
-from linkahead.exceptions import HTTPClientError
+from linkahead.exceptions import HTTPClientError, HTTPResourceNotFoundError, HTTPForbiddenError
 
 __all__ = []
 __version__ = 0.3
@@ -42,19 +42,42 @@ __updated__ = '2018-12-11'
 
 
 def do_update_role(args):
-    admin._update_role(name=args.role_name, description=args.role_description)
+    """
+    Update the description of a role.
+
+    Expected attributes of ``args`` (argparse namespace):
+    role_name: Name of the role to update
+    role_description: New description of the role
+    """
+    try:
+        admin._update_role(name=args.role_name, description=args.role_description)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError) as e:
+        print(f"Error: Cannot update role '{args.role_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_create_role(args):
-    admin._insert_role(name=args.role_name, description=args.role_description)
+    try:
+        admin._insert_role(name=args.role_name, description=args.role_description)
+    except (HTTPClientError, HTTPForbiddenError) as e:
+        print(f"Error: Cannot create role '{args.role_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_retrieve_role(args):
-    print(admin._retrieve_role(name=args.role_name))
+    try:
+        print(admin._retrieve_role(name=args.role_name))
+    except (HTTPResourceNotFoundError, HTTPForbiddenError) as e:
+        print(f"Error: Cannot retrieve role '{args.role_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_delete_role(args):
-    admin._delete_role(name=args.role_name)
+    try:
+        admin._delete_role(name=args.role_name)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError) as e:
+        print(f"Error: Cannot delete role '{args.role_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_retrieve(args):
@@ -123,25 +146,27 @@ def do_create_user(args):
     try:
         admin._insert_user(name=args.user_name,
                            email=args.user_email, password=password)
-
         if args.activate_user:
             do_activate_user(args)
-    except HTTPClientError as e:
-        print(e.msg)
+    except (HTTPForbiddenError, HTTPClientError) as e:
+        print(f"Error: Cannot create user '{args.user_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_activate_user(args):
     try:
         admin._update_user(name=args.user_name, status="ACTIVE")
-    except HTTPClientError as e:
-        print(e.msg)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError, HTTPClientError) as e:
+        print(f"Error: Cannot activate user '{args.user_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_deactivate_user(args):
     try:
         admin._update_user(name=args.user_name, status="INACTIVE")
-    except HTTPClientError as e:
-        print(e.msg)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError, HTTPClientError) as e:
+        print(f"Error: Cannot deactivate user '{args.user_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_set_user_password(args):
@@ -150,58 +175,110 @@ def do_set_user_password(args):
     else:
         password = args.user_password
     try:
-        admin._update_user(name=args.user_name, password=password)
-    except HTTPClientError as e:
-        print(e.msg)
+        admin._update_user(name=args.user_name, password=password, realm=args.realm)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError, HTTPClientError) as e:
+        print(f"Error: Cannot set password for user '{args.user_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_add_user_roles(args):
-    roles = admin._get_roles(username=args.user_name, realm=None)
+    try:
+        roles = admin._get_roles(username=args.user_name, realm=None)
+    except (HTTPForbiddenError, HTTPResourceNotFoundError) as e:
+        print(f"Error: Cannot access roles for user '{args.user_name}', "
+              f"reason: '{e.msg}'")
+        return
 
     for r in args.user_roles:
         roles.add(r)
-    admin._set_roles(username=args.user_name, roles=roles)
+    try:
+        admin._set_roles(username=args.user_name, roles=roles)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError, HTTPClientError) as e:
+        print(f"Error: Cannot add new roles for user '{args.user_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_remove_user_roles(args):
-    roles = admin._get_roles(username=args.user_name, realm=None)
+    try:
+        roles = admin._get_roles(username=args.user_name, realm=None)
+    except (HTTPForbiddenError, HTTPResourceNotFoundError) as e:
+        print(f"Error: Cannot access roles for user '{args.user_name}', "
+              f"reason: '{e.msg}'")
+        return
 
     for r in args.user_roles:
         if r in roles:
             roles.remove(r)
-    admin._set_roles(username=args.user_name, roles=roles)
+    try:
+        admin._set_roles(username=args.user_name, roles=roles)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError, HTTPClientError) as e:
+        print(f"Error: Cannot remove roles from user '{args.user_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_set_user_entity(args):
-    admin._update_user(name=args.user_name, entity=args.user_entity)
+    try:
+        admin._update_user(name=args.user_name, entity=args.user_entity)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError, HTTPClientError) as e:
+        print(f"Error: Cannot set entity for user '{args.user_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_reset_user_entity(args):
-    admin._update_user(name=args.user_name, entity="")
+    try:
+        admin._update_user(name=args.user_name, entity="")
+    except (HTTPResourceNotFoundError, HTTPForbiddenError, HTTPClientError) as e:
+        print(f"Error: Cannot remove entity for user '{args.user_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_set_user_email(args):
-    admin._update_user(name=args.user_name, email=args.user_email)
+    try:
+        admin._update_user(name=args.user_name, email=args.user_email)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError, HTTPClientError) as e:
+        print(f"Error: Cannot set email for user '{args.user_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_retrieve_user(args):
-    print(admin._retrieve_user(name=args.user_name))
+    try:
+        print(admin._retrieve_user(name=args.user_name))
+    except (HTTPResourceNotFoundError, HTTPForbiddenError) as e:
+        print(f"Error: Cannot retrieve user '{args.user_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_delete_user(args):
-    admin._delete_user(name=args.user_name)
+    try:
+        admin._delete_user(name=args.user_name)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError) as e:
+        print(f"Error: Cannot delete user '{args.user_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_retrieve_user_roles(args):
-    print(admin._get_roles(username=args.user_name))
+    try:
+        print(admin._get_roles(username=args.user_name))
+    except (HTTPResourceNotFoundError, HTTPForbiddenError) as e:
+        print(f"Error: Cannot retrieve roles for user '{args.user_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_retrieve_role_permissions(args):
-    print(admin._get_permissions(role=args.role_name))
+    try:
+        print(admin._get_permissions(role=args.role_name))
+    except (HTTPResourceNotFoundError, HTTPForbiddenError) as e:
+        print(f"Error: Cannot retrieve permissions for role '{args.role_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_grant_role_permissions(args):
-    perms = admin._get_permissions(args.role_name)
+    try:
+        perms = admin._get_permissions(role=args.role_name)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError) as e:
+        print(f"Error: Cannot access permissions for role '{args.role_name}', "
+              f"reason: '{e.msg}'")
+        return
 
     for p in args.role_permissions:
         g = admin.PermissionRule(
@@ -215,11 +292,20 @@ def do_grant_role_permissions(args):
         if d in perms:
             perms.remove(d)
         perms.add(g)
-    admin._set_permissions(role=args.role_name, permission_rules=perms)
+    try:
+        admin._set_permissions(role=args.role_name, permission_rules=perms)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError) as e:
+        print(f"Error: Cannot set permissions for role '{args.role_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_revoke_role_permissions(args):
-    perms = admin._get_permissions(args.role_name)
+    try:
+        perms = admin._get_permissions(role=args.role_name)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError) as e:
+        print(f"Error: Cannot access permissions for role '{args.role_name}', "
+              f"reason: '{e.msg}'")
+        return
 
     for p in args.role_permissions:
         g = admin.PermissionRule(
@@ -232,11 +318,20 @@ def do_revoke_role_permissions(args):
 
         if d in perms:
             perms.remove(d)
-    admin._set_permissions(role=args.role_name, permission_rules=perms)
+    try:
+        admin._set_permissions(role=args.role_name, permission_rules=perms)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError) as e:
+        print(f"Error: Cannot revoke permissions for role '{args.role_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_deny_role_permissions(args):
-    perms = admin._get_permissions(args.role_name)
+    try:
+        perms = admin._get_permissions(role=args.role_name)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError) as e:
+        print(f"Error: Cannot access permissions for role '{args.role_name}', "
+              f"reason: '{e.msg}'")
+        return
 
     for p in args.role_permissions:
         g = admin.PermissionRule(
@@ -250,7 +345,11 @@ def do_deny_role_permissions(args):
         if d in perms:
             perms.remove(d)
         perms.add(d)
-    admin._set_permissions(role=args.role_name, permission_rules=perms)
+    try:
+        admin._set_permissions(role=args.role_name, permission_rules=perms)
+    except (HTTPResourceNotFoundError, HTTPForbiddenError) as e:
+        print(f"Error: Cannot deny permissions for role '{args.role_name}', "
+              f"reason: '{e.msg}'")
 
 
 def do_retrieve_entity_acl(args):
@@ -364,6 +463,12 @@ USAGE
         metavar='USERNAME',
         dest="user_name",
         help="The name of the user who's password is to be set.")
+    subparser.add_argument(
+        metavar='REALM',
+        dest="realm",
+        nargs="?",
+        default=None,
+        help="The realm of the user who's password is to be set.")
     subparser.add_argument(
         metavar='PASSWORD',
         nargs="?",
diff --git a/src/linkahead/utils/plantuml.py b/src/linkahead/utils/plantuml.py
index 19594d6e856e740fe2c58c5128eead31c37485ce..59e3c34dd04c2425aef46b6d9e2411f75b747aca 100644
--- a/src/linkahead/utils/plantuml.py
+++ b/src/linkahead/utils/plantuml.py
@@ -130,9 +130,9 @@ def recordtypes_to_plantuml_string(iterable,
 
     classes = [el for el in iterable
                if isinstance(el, db.RecordType)]
-    dependencies = {}
-    inheritances = {}
-    properties = [p for p in iterable if isinstance(p, db.Property)]
+    dependencies: dict = {}
+    inheritances: dict = {}
+    properties: list = [p for p in iterable if isinstance(p, db.Property)]
     grouped = [g for g in iterable if isinstance(g, Grouped)]
 
     def _add_properties(c, importance=None):
@@ -272,7 +272,8 @@ package \"The property P references an instance of D\" <<Rectangle>> {
     return result
 
 
-def retrieve_substructure(start_record_types, depth, result_id_set=None, result_container=None, cleanup=True):
+def retrieve_substructure(start_record_types, depth, result_id_set=None, result_container=None,
+                          cleanup=True):
     """Recursively retrieves LinkAhead record types and properties, starting
     from given initial types up to a specific depth.
 
diff --git a/src/linkahead/utils/register_tests.py b/src/linkahead/utils/register_tests.py
index 6909544fed5a6f80572f60ba102c72b53568d897..66fd4553346075fc77aa7b1f6003d26d9967c223 100644
--- a/src/linkahead/utils/register_tests.py
+++ b/src/linkahead/utils/register_tests.py
@@ -18,44 +18,62 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-import linkahead as db
-from linkahead import administration as admin
-
-"""
-This module implements a registration procedure for integration tests which
+"""This module implements a registration procedure for integration tests which
 need a running LinkAhead instance.
 
-It ensures that tests do not accidentally overwrite data in real LinkAhead
-instances, as it checks whether the running LinkAhead instance is actually the
-correct one, that
-should be used for these tests.
-
-The test files have to define a global variable TEST_KEY which must be unique
-for each test using
+It ensures that tests do not accidentally overwrite data in real
+LinkAhead instances, as it checks whether the running LinkAhead
+instance is actually the correct one that should be used for these
+tests.
 
-set_test_key("ABCDE")
+The test files have to define a global variable ``TEST_KEY`` which
+must be unique for each test using
+:py:meth:`~linkahead.utils.register_tests.set_test_key`.
 
 The test procedure (invoked by pytest) checks whether a registration
 information is stored in one of the server properties or otherwise
-- offers to register this test in the currently running database ONLY if this
-  is empty.
+
+- offers to register this test in the currently running database ONLY if this is
+  empty.
 - fails otherwise with a RuntimeError
 
-NOTE: you probably need to use pytest with the -s option to be able to
-      register the test interactively. Otherwise, the server property has to be
-      set before server start-up in the server.conf of the LinkAhead server.
+.. note::
+
+    You probably need to use pytest with the -s option to be able to
+    register the test interactively. Otherwise, the server property
+    has to be set before server start-up in the server.conf of the
+    LinkAhead server.
 
 This module is intended to be used with pytest.
 
-There is a pytest fixture "clear_database" that performs the above mentioned
-checks and clears the database in case of success.
+There is a pytest fixture
+:py:meth:`~linkahead.utils.register_tests.clear_database` that
+performs the above mentioned checks and clears the database in case of
+success.
+
 """
 
+import linkahead as db
+from linkahead import administration as admin
+
 TEST_KEY = None
 
 
-def set_test_key(KEY):
+def set_test_key(KEY: str):
+    """Set the global ``TEST_KEY`` variable to `KEY`. Afterwards, if
+    `KEY` matches the ``_CAOSDB_INTEGRATION_TEST_SUITE_KEY`` server
+    environment variable, methods like :py:meth:`clear_database` can
+    be used. Call this function at the beginning of your test file.
+
+    Parameters
+    ----------
+    KEY : str
+        key with which the test using this function is registered and
+        which is checked against the
+        ``_CAOSDB_INTEGRATION_TEST_SUITE_KEY`` server environment
+        variable.
+
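+    Examples
+    --------
+    >>> set_test_key("ABCDE")
+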
+    """
     global TEST_KEY
     TEST_KEY = KEY
 
@@ -122,10 +140,14 @@ try:
 
     @pytest.fixture
     def clear_database():
-        """Remove Records, RecordTypes, Properties, and Files ONLY IF the LinkAhead
-        server the current connection points to was registered with the appropriate key.
+        """Remove Records, RecordTypes, Properties, and Files ONLY IF
+        the LinkAhead server the current connection points to was
+        registered with the appropriate key using
+        :py:meth:`set_test_key`.
+
+        PyTestInfo Records and the corresponding RecordType and
+        Property are preserved.
 
-        PyTestInfo Records and the corresponding RecordType and Property are preserved.
         """
         _assure_test_is_registered()
         yield _clear_database()  # called before the test function
diff --git a/src/linkahead/utils/server_side_scripting.py b/src/linkahead/utils/server_side_scripting.py
index 06caa3d94a629e368dc99f83dc2957c756b7b487..867155cf1f93bf1936e9b19f14926726f362edaf 100644
--- a/src/linkahead/utils/server_side_scripting.py
+++ b/src/linkahead/utils/server_side_scripting.py
@@ -99,6 +99,19 @@ def _make_request(call, pos_args, opts, files=None):
 
 def run_server_side_script(call, *args, files=None, **kwargs):
     """
+    Parameters
+    ----------
+    call : str
+        name of the script to be called, potentially with path prefix
+        (e.g. ``management/update.py``)
+    *args : list(str)
+        list of positional arguments
+    files : dict
+        dictionary where the keys are the argument names with prefix (e.g. ``-p1`` or
+        ``-Ofile``) and the values are the paths of the files to be uploaded. Note that
+        only the base name is used for the upload. Files will be placed in the
+        ``.upload_files`` folder, so the script will be called with the argument
+        ``<key>=.upload_files/<basename>``.
+    **kwargs : dict
+        kwargs will be passed to ``_make_request``
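+
+    For example (illustrative arguments and file path):
+
+    >>> result = run_server_side_script("management/update.py", "some_arg",
+    ...                                 files={"-p1": "/local/path/data.csv"})  # doctest: +SKIP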
 
     Return
     ------
diff --git a/tox.ini b/tox.ini
index bbaaa1fc9eec2aba87c247d783818d215d8a7d5e..c63555c27b73224106109c1f675e9525d9e89b74 100644
--- a/tox.ini
+++ b/tox.ini
@@ -9,7 +9,7 @@ deps = .
     mypy
     jsonschema>=4.4.0
     setuptools
-commands=py.test --cov=caosdb -vv {posargs}
+commands=py.test --cov=linkahead -vv {posargs}
 
 [flake8]
 max-line-length=100
@@ -17,6 +17,6 @@ max-line-length=100
 [pytest]
 testpaths = unittests
 xfail_strict = True
-addopts = -x -vv --cov=caosdb
+addopts = -x -vv --cov=linkahead
 pythonpath = src
 
diff --git a/unittests/test_apiutils.py b/unittests/test_apiutils.py
index 4705f19a1bdfbc4358790f787f2dce9ea97fee48..6667089abc2d16e59bd97d16f7d0fe75d07afe1b 100644
--- a/unittests/test_apiutils.py
+++ b/unittests/test_apiutils.py
@@ -1,6 +1,7 @@
 #
 # This file is a part of the LinkAhead Project.
 #
+# Copyright (C) 2024 Alexander Schlemmer <a.schlemmer@indiscale.com>
 # Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com>
 # Copyright (C) 2022 Florian Spreckelsen <f.spreckelsen@indiscale.com>
 # Copyright (C) 2022 Daniel Hornung <d.hornung@indiscale.com>
@@ -25,13 +26,15 @@
 # Test apiutils
 # A. Schlemmer, 02/2018
 
+from io import StringIO
 
 import linkahead as db
 import linkahead.apiutils
 import pytest
 from linkahead.apiutils import (EntityMergeConflictError, apply_to_ids,
                                 compare_entities, create_id_query, empty_diff,
-                                merge_entities, resolve_reference)
+                                merge_entities, resolve_reference,
+                                describe_diff)
 from linkahead.common.models import SPECIAL_ATTRIBUTES
 
 
@@ -96,6 +99,7 @@ def test_resolve_reference():
 
 
 def test_compare_entities():
+    # test compare of parents, properties
     r1 = db.Record()
     r2 = db.Record()
     r1.add_parent("bla")
@@ -111,13 +115,27 @@ def test_compare_entities():
     r2.add_property("tester", )
     r1.add_property("tests_234234", value=45)
     r2.add_property("tests_TT", value=45)
+    r1.add_property("datatype", value=45, datatype=db.INTEGER)
+    r2.add_property("datatype", value=45)
+    r1.add_property("entity_id", value=2)
+    r2.add_property("entity_id", value=24)
+    r1.add_property("entity_mix_e", value=2)
+    r2.add_property("entity_mix_e", value=db.Entity(id=2))
+    r1.add_property("entity_mix_d", value=22)
+    r2.add_property("entity_mix_d", value=db.Entity(id=2))
+    r1.add_property("entity_mix_w", value=22)
+    r2.add_property("entity_mix_w", value=db.Entity())
+    r1.add_property("entity_Ent_e", value=db.Entity(id=2))
+    r2.add_property("entity_Ent_e", value=db.Entity(id=2))
+    r1.add_property("entity_Ent_d", value=db.Entity(id=2))
+    r2.add_property("entity_Ent_d", value=db.Entity(id=22))
 
     diff_r1, diff_r2 = compare_entities(r1, r2)
 
     assert len(diff_r1["parents"]) == 1
     assert len(diff_r2["parents"]) == 0
-    assert len(diff_r1["properties"]) == 4
-    assert len(diff_r2["properties"]) == 4
+    assert len(diff_r1["properties"]) == 11
+    assert len(diff_r2["properties"]) == 11
 
     assert "test" not in diff_r1["properties"]
     assert "test" not in diff_r2["properties"]
@@ -134,13 +152,89 @@ def test_compare_entities():
     assert "tests_234234" in diff_r1["properties"]
     assert "tests_TT" in diff_r2["properties"]
 
+    assert "datatype" in diff_r1["properties"]
+    assert "datatype" in diff_r1["properties"]["datatype"]
+    assert "datatype" in diff_r2["properties"]
+    assert "datatype" in diff_r2["properties"]["datatype"]
+
+    assert "entity_id" in diff_r1["properties"]
+    assert "entity_id" in diff_r2["properties"]
+
+    assert "entity_mix_e" in diff_r1["properties"]
+    assert "entity_mix_e" in diff_r2["properties"]
+    assert "entity_Ent_e" in diff_r1["properties"]
+    assert "entity_Ent_e" in diff_r2["properties"]
+
+    assert "entity_mix_d" in diff_r1["properties"]
+    assert "entity_mix_d" in diff_r2["properties"]
+    assert "entity_mix_w" in diff_r1["properties"]
+    assert "entity_mix_w" in diff_r2["properties"]
+    assert "entity_Ent_d" in diff_r1["properties"]
+    assert "entity_Ent_d" in diff_r2["properties"]
+
+    diff_r1, diff_r2 = compare_entities(r1, r2, compare_referenced_records=True)
+
+    assert len(diff_r1["parents"]) == 1
+    assert len(diff_r2["parents"]) == 0
+    assert len(diff_r1["properties"]) == 10
+    assert len(diff_r2["properties"]) == 10
+
+    assert "entity_id" in diff_r1["properties"]
+    assert "entity_id" in diff_r2["properties"]
+
+    assert "entity_mix_e" in diff_r1["properties"]
+    assert "entity_mix_e" in diff_r2["properties"]
+    assert "entity_mix_w" in diff_r1["properties"]
+    assert "entity_mix_w" in diff_r2["properties"]
+    assert "entity_Ent_e" not in diff_r1["properties"]
+    assert "entity_Ent_e" not in diff_r2["properties"]
+
+    assert "entity_mix_d" in diff_r1["properties"]
+    assert "entity_mix_d" in diff_r2["properties"]
+    assert "entity_Ent_d" in diff_r1["properties"]
+    assert "entity_Ent_d" in diff_r2["properties"]
+
+    diff_r1, diff_r2 = compare_entities(r1, r2,
+                                        entity_name_id_equivalency=True,
+                                        compare_referenced_records=True)
+
+    assert len(diff_r1["parents"]) == 1
+    assert len(diff_r2["parents"]) == 0
+    assert len(diff_r1["properties"]) == 9
+    assert len(diff_r2["properties"]) == 9
+
+    assert "entity_id" in diff_r1["properties"]
+    assert "entity_id" in diff_r2["properties"]
+
+    assert "entity_mix_e" not in diff_r1["properties"]
+    assert "entity_mix_e" not in diff_r2["properties"]
+    assert "entity_mix_w" in diff_r1["properties"]
+    assert "entity_mix_w" in diff_r2["properties"]
+    assert "entity_Ent_e" not in diff_r1["properties"]
+    assert "entity_Ent_e" not in diff_r2["properties"]
+
+    assert "entity_mix_d" in diff_r1["properties"]
+    assert "entity_mix_d" in diff_r2["properties"]
+    assert "entity_Ent_d" in diff_r1["properties"]
+    assert "entity_Ent_d" in diff_r2["properties"]
+
+    r1 = db.Record()
+    r2 = db.Record()
+    r1.add_property(id=20, name="entity_mix_d", value=2, datatype=db.LIST("B"))
+    r2.add_property("entity_mix_d", value=db.Entity())
+
+    diff_r1, diff_r2 = compare_entities(r1, r2, compare_referenced_records=True)
+
+    assert len(diff_r1["properties"]) == 1
+    assert len(diff_r2["properties"]) == 1
+
+    assert "entity_mix_d" in diff_r1["properties"]
+    assert "entity_mix_d" in diff_r2["properties"]
+
 
 def test_compare_entities_units():
     r1 = db.Record()
     r2 = db.Record()
-    r1.add_parent("bla")
-    r2.add_parent("bla")
-    r1.add_parent("lopp")
     r1.add_property("test", value=2, unit="cm")
     r2.add_property("test", value=2, unit="m")
     r1.add_property("tests", value=3, unit="cm")
@@ -152,8 +246,6 @@ def test_compare_entities_units():
 
     diff_r1, diff_r2 = compare_entities(r1, r2)
 
-    assert len(diff_r1["parents"]) == 1
-    assert len(diff_r2["parents"]) == 0
     assert len(diff_r1["properties"]) == 4
     assert len(diff_r2["properties"]) == 4
 
@@ -170,14 +262,229 @@ def test_compare_entities_units():
     assert diff_r2["properties"]["test"]["unit"] == "m"
 
 
+def test_compare_entities_battery():
+    par1, par3 = db.Record(name=""), db.RecordType(name="")
+    r1, r2, r3 = db.Record(), db.Record(), db.Record()
+    prop2 = db.Property(name="Property 2")
+    prop3 = db.Property(name="")
+
+    # Basic tests for Properties
+    prop_settings = {"datatype": db.REFERENCE, "description": "desc of prop",
+                     "value": db.Record().add_parent(par3), "unit": '°'}
+    t1 = db.Record().add_parent(db.RecordType(id=1))
+    t2 = db.Record().add_parent(db.RecordType(id=1))
+    # Change datatype
+    t1.add_property(db.Property(name="datatype", **prop_settings))
+    prop_settings["datatype"] = par3
+    t2.add_property(db.Property(name="datatype", **prop_settings))
+    # Change description
+    t1.add_property(db.Property(name="description", **prop_settings))
+    prop_settings["description"] = "diff desc"
+    t2.add_property(db.Property(name="description", **prop_settings))
+    # Change value to copy
+    t1.add_property(db.Property(name="value copy", **prop_settings))
+    prop_settings["value"] = db.Record().add_parent(par3)
+    t2.add_property(db.Property(name="value copy", **prop_settings))
+    # Change value to something different
+    t1.add_property(db.Property(name="value", **prop_settings))
+    prop_settings["value"] = db.Record(name="n").add_parent(par3)
+    t2.add_property(db.Property(name="value", **prop_settings))
+    # Change unit
+    t1.add_property(db.Property(name="unit", **prop_settings))
+    prop_settings["unit"] = db.Property(unit='°')
+    t2.add_property(db.Property(name="unit", **prop_settings))
+    # Change unit again
+    t1.add_property(db.Property(name="unit 2", **prop_settings))
+    prop_settings["unit"] = db.Property()
+    t2.add_property(db.Property(name="unit 2", **prop_settings))
+    # Compare
+    diff_0 = compare_entities(t1, t2)
+    diff_1 = compare_entities(t1, t2, compare_referenced_records=True)
+    # Check correct detection of changes
+    assert diff_0[0]["properties"]["datatype"] == {"datatype": db.REFERENCE}
+    assert diff_0[1]["properties"]["datatype"] == {"datatype": par3}
+    assert diff_0[0]["properties"]["description"] == {"description": "desc of prop"}
+    assert diff_0[1]["properties"]["description"] == {"description": "diff desc"}
+    assert "value" in diff_0[0]["properties"]["value copy"]
+    assert "value" in diff_0[1]["properties"]["value copy"]
+    assert "value" in diff_0[0]["properties"]["value"]
+    assert "value" in diff_0[1]["properties"]["value"]
+    assert "unit" in diff_0[0]["properties"]["unit"]
+    assert "unit" in diff_0[1]["properties"]["unit"]
+    assert "unit" in diff_0[0]["properties"]["unit 2"]
+    assert "unit" in diff_0[1]["properties"]["unit 2"]
+    # Check correct result for compare_referenced_records=True
+    assert "value copy" not in diff_1[0]["properties"]
+    assert "value copy" not in diff_1[1]["properties"]
+    diff_0[0]["properties"].pop("value copy")
+    diff_0[1]["properties"].pop("value copy")
+    assert diff_0 == diff_1
+
+    # Basic tests for Parents
+    t3 = db.Record().add_parent(db.RecordType("A")).add_parent(db.Record("B"))
+    t4 = db.Record().add_parent(db.RecordType("A"))
+    assert compare_entities(t3, t4)[0]['parents'] == ['B']
+    assert len(compare_entities(t3, t4)[1]['parents']) == 0
+    t4.add_parent(db.Record("B"))
+    assert empty_diff(t3, t4)
+    # The two following assertions document current behaviour but do not make a
+    # lot of sense
+    t4.add_parent(db.Record("B"))
+    assert empty_diff(t3, t4)
+    t3.add_parent(db.RecordType("A")).add_parent(db.Record("B"))
+    t4.add_parent(db.RecordType("B")).add_parent(db.Record("A"))
+    assert empty_diff(t3, t4)
+
+    # Basic tests for special attributes
+    prop_settings = {"id": 42, "name": "Property",
+                     "datatype": db.LIST(db.REFERENCE), "value": [db.Record(name="")],
+                     "unit": '€', "description": "desc of prop"}
+    alt_settings = {"id": 64, "name": "Property 2",
+                    "datatype": db.LIST(db.TEXT), "value": [db.RecordType(name="")],
+                    "unit": '€€', "description": " ę Ě ப ཾ ཿ ∛ ∜ ㅿ ㆀ 값 "}
+    t5 = db.Property(**prop_settings)
+    t6 = db.Property(**prop_settings)
+    assert empty_diff(t5, t6)
+    # ID
+    t5.id = alt_settings['id']
+    diff = compare_entities(t5, t6)
+    assert diff[0] == {'properties': {}, 'parents': [], 'id': alt_settings['id']}
+    assert diff[1] == {'properties': {}, 'parents': [], 'id': prop_settings['id']}
+    t6.id = alt_settings['id']
+    assert empty_diff(t5, t6)
+    # Name
+    t5.name = alt_settings['name']
+    diff = compare_entities(t5, t6)
+    assert diff[0] == {'properties': {}, 'parents': [], 'name': alt_settings['name']}
+    assert diff[1] == {'properties': {}, 'parents': [], 'name': prop_settings['name']}
+    t6.name = alt_settings['name']
+    assert empty_diff(t5, t6)
+    # Description
+    t6.description = alt_settings['description']
+    diff = compare_entities(t5, t6)
+    assert diff[0] == {'properties': {}, 'parents': [], 'description': prop_settings['description']}
+    assert diff[1] == {'properties': {}, 'parents': [], 'description': alt_settings['description']}
+    t5.description = alt_settings['description']
+    assert empty_diff(t5, t6)
+    # Unit
+    t5.unit = alt_settings['unit']
+    diff = compare_entities(t5, t6)
+    assert diff[0] == {'properties': {}, 'parents': [], 'unit': alt_settings['unit']}
+    assert diff[1] == {'properties': {}, 'parents': [], 'unit': prop_settings['unit']}
+    t6.unit = alt_settings['unit']
+    assert empty_diff(t5, t6)
+    # Value
+    t6.value = alt_settings['value']
+    diff = compare_entities(t5, t6)
+    assert diff[0] == {'properties': {}, 'parents': [], 'value': prop_settings['value']}
+    assert diff[1] == {'properties': {}, 'parents': [], 'value': alt_settings['value']}
+    t5.value = alt_settings['value']
+    assert empty_diff(t5, t6)
+    # Datatype
+    t6.datatype = alt_settings['datatype']
+    diff = compare_entities(t5, t6)
+    assert diff[0] == {'properties': {}, 'parents': [], 'datatype': prop_settings['datatype']}
+    assert diff[1] == {'properties': {}, 'parents': [], 'datatype': alt_settings['datatype']}
+    t5.datatype = alt_settings['datatype']
+    assert empty_diff(t5, t6)
+    # All at once
+    diff = compare_entities(db.Property(**prop_settings), db.Property(**alt_settings))
+    assert diff[0] == {'properties': {}, 'parents': [], **prop_settings}
+    assert diff[1] == {'properties': {}, 'parents': [], **alt_settings}
+    # Entity Type
+    diff = compare_entities(db.Property(value=db.Property(id=101)),
+                            db.Property(value=db.Record(id=101)))
+    assert "value" in diff[0]
+    assert "value" in diff[1]
+    diff = compare_entities(db.Property(value=db.Record(id=101)),
+                            db.Property(value=db.Record(id=101)))
+    assert "value" in diff[0]
+    assert "value" in diff[1]
+    assert empty_diff(db.Property(value=db.Record(id=101)),
+                      db.Property(value=db.Record(id=101)),
+                      compare_referenced_records=True)
+
+    # Special cases
+    # Files
+    assert not empty_diff(db.File(path='ABC', file=StringIO("ABC")),
+                          db.File(path='ABC', file=StringIO("Other")))
+    # Importance
+    assert empty_diff(db.Property().add_property(prop2),
+                      db.Property().add_property(prop2))
+    assert not empty_diff(db.Property().add_property(prop2, importance=db.SUGGESTED),
+                          db.Property().add_property(prop2, importance=db.OBLIGATORY))
+    # Mixed Lists
+    assert empty_diff(db.Property(value=[1, 2, 'a', r1]),
+                      db.Property(value=[1, 2, 'a', r1]))
+    # entity_name_id_equivalency
+    assert not empty_diff(db.Property(value=[1, db.Record(id=2), 3, db.Record(id=4)]),
+                          db.Property(value=[db.Record(id=1), 2, db.Record(id=3), 4]))
+    assert empty_diff(db.Property(value=[1, db.Record(id=2), 3, db.Record(id=4)]),
+                      db.Property(value=[db.Record(id=1), 2, db.Record(id=3), 4]),
+                      entity_name_id_equivalency=True)
+    assert empty_diff(db.Property(value=1), db.Property(value=db.Record(id=1)),
+                      entity_name_id_equivalency=True)
+    # entity_name_id_equivalency with entity-valued and list-valued properties
+    prop4 = db.Property(**prop_settings).add_parent(par1).add_property(prop2)
+    prop4_c = db.Property(**prop_settings).add_parent(par1).add_property(prop2)
+    prop4.value = db.Record(id=12)
+    prop4_c.value = '12'
+    prop4.add_property(db.Property(name="diff", datatype=db.LIST(db.REFERENCE),
+                                   value=[12, db.Record(id=13), par1, "abc%"]))
+    prop4_c.add_property(db.Property(name="diff", datatype=db.LIST(db.REFERENCE),
+                                     value=[db.Record(id=12), "13", par1, "abc%"]))
+    assert not empty_diff(prop4, prop4_c, entity_name_id_equivalency=False)
+    assert empty_diff(prop4, prop4_c, entity_name_id_equivalency=True)
+    # Order invariance
+    t7 = db.Property(**prop_settings).add_parent(par1).add_property(prop2)
+    t8 = db.Property(**alt_settings).add_parent(par3).add_property(prop3)
+    diffs_0 = compare_entities(t7, t8), compare_entities(t7, t8, True)
+    diffs_1 = compare_entities(t8, t7)[::-1], compare_entities(t8, t7, True)[::-1]
+    assert diffs_0 == diffs_1
+    prop_settings = {"datatype": db.REFERENCE, "description": "desc of prop",
+                     "value": db.Record().add_parent(par3), "unit": '°'}
+    t1.add_property(db.Property(name="description", **prop_settings))
+    t2.add_property(db.Property(name="description", **prop_settings))
+    # Order invariance for multi-property - either both fail or same result
+    try:
+        diffs_0 = compare_entities(t1, t2), compare_entities(t1, t2, True)
+    except Exception as e:
+        diffs_0 = type(e)
+    try:
+        diffs_1 = compare_entities(t2, t1)[::-1], compare_entities(t2, t1, True)[::-1]
+    except Exception as e:
+        diffs_1 = type(e)
+    assert diffs_0 == diffs_1
+    # Property types
+    t09, t10 = db.RecordType(), db.RecordType()
+    for t, ex in [(db.INTEGER, [-12, 0]), (db.DATETIME, ["2030-01-01", "1012-02-29"]),
+                  (db.DOUBLE, [13.23, 7.1]), (db.BOOLEAN, [True, False])]:
+        t09.add_property(db.Property(name=f"{t}:{ex[0]}", datatype=t, value=ex[0]))
+        t10.add_property(db.Property(name=f"{t}:{ex[0]}", datatype=t, value=ex[0]))
+        t09.add_property(name=f"{t}:{ex[1]}", datatype=t, value=ex[1])
+        t10.add_property(name=f"{t}:{ex[1]}", datatype=t, value=ex[1])
+    assert empty_diff(t09, t10)
+    t09.add_property(name=f"diff", value=1)
+    t10.add_property(name=f"diff", value=2)
+    assert not empty_diff(t09, t10)
+    # Default values
+    t09, t10 = db.Record(), db.Record()
+    t09.add_property(db.Property(name=f"A1"), value="A")
+    t10.add_property(name=f"A1", value="A")
+    t09.add_property(db.Property(id=12, name=f"A2"), value="A")
+    t10.add_property(id=12, name=f"A2", value="A")
+    t09.add_property(db.Property(id=15), value="A")
+    t10.add_property(id=15, value="A")
+    assert empty_diff(t09, t10)
+    # ToDo: extended tests for references
+
+
 def test_compare_special_properties():
     # Test for all known special properties:
-    SPECIAL_PROPERTIES = ("description", "name",
-                          "checksum", "size", "path", "id")
     INTS = ("size", "id")
     HIDDEN = ("checksum", "size")
 
-    for key in SPECIAL_PROPERTIES:
+    for key in SPECIAL_ATTRIBUTES:
         set_key = key
         if key in HIDDEN:
             set_key = "_" + key
@@ -215,8 +522,7 @@ def test_compare_special_properties():
         assert len(diff_r1["properties"]) == 0
         assert len(diff_r2["properties"]) == 0
 
-
-def test_compare_properties():
+    # compare Property objects
     p1 = db.Property()
     p2 = db.Property()
 
@@ -467,10 +773,10 @@ def test_empty_diff():
     rec_a.remove_property("RefType")
     rec_b.remove_property("RefType")
     assert empty_diff(rec_a, rec_b)
-    rec_a.add_property(name="RefType", datatype=db.LIST(
-        "RefType"), value=[ref_rec_a, ref_rec_a])
-    rec_b.add_property(name="RefType", datatype=db.LIST(
-        "RefType"), value=[ref_rec_b, ref_rec_b])
+    rec_a.add_property(name="RefType", datatype=db.LIST("RefType"),
+                       value=[ref_rec_a, ref_rec_a])
+    rec_b.add_property(name="RefType", datatype=db.LIST("RefType"),
+                       value=[ref_rec_b, ref_rec_b])
     assert not empty_diff(rec_a, rec_b)
     assert empty_diff(rec_a, rec_b, compare_referenced_records=True)
 
@@ -568,6 +874,21 @@ B: something else"""
     # unchanged
     assert recB.get_property("propA").unit == "cm"
 
+    # test whether an id is correctly overwritten by an entity without id
+    recA = db.Record().add_parent("A").add_property(name="B", value=112)
+    newRec = db.Record().add_parent("B").add_property("c")
+    recB = db.Record().add_parent("A").add_property(name="B", value=newRec)
+
+    merge_entities(recA, recB, force=True)
+    assert recA.get_property("B").value == newRec
+
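+    # Same check for a list-valued property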
+    recA = db.Record().add_parent("A").add_property(name="B", value=[112],
+                                                    datatype=db.LIST("B"))
+    recB = db.Record().add_parent("A").add_property(name="B", value=[newRec], datatype=db.LIST(db.REFERENCE))
+
+    merge_entities(recA, recB, force=True)
+    assert recA.get_property("B").value == [newRec]
+
 
 def test_merge_missing_list_datatype_82():
     """Merging two properties, where the list-valued one has no datatype."""
@@ -601,13 +922,12 @@ def test_merge_id_with_resolved_entity():
 
     # Overwrite from right to left in both cases
     merge_entities(recA, recB, merge_id_with_resolved_entity=True)
-    assert recA.get_property(rtname).value == ref_id
-    assert recA.get_property(rtname).value == recB.get_property(rtname).value
+    assert recA.get_property(rtname).value == ref_rec
 
     recA = db.Record().add_property(name=rtname, value=ref_rec)
     merge_entities(recB, recA, merge_id_with_resolved_entity=True)
-    assert recB.get_property(rtname).value == ref_rec
-    assert recA.get_property(rtname).value == recB.get_property(rtname).value
+    assert recB.get_property(rtname).value == ref_id
+    assert recA.get_property(rtname).value == ref_rec
 
     # id mismatches
     recB = db.Record().add_property(name=rtname, value=ref_id*2)
@@ -623,7 +943,79 @@ def test_merge_id_with_resolved_entity():
     # also works in lists:
     recA = db.Record().add_property(
         name=rtname, datatype=db.LIST(rtname), value=[ref_rec, ref_id*2])
-    recB = db.Record().add_property(name=rtname, datatype=db.LIST(rtname), value=[ref_id, ref_id*2])
+    recB = db.Record().add_property(
+        name=rtname, datatype=db.LIST(rtname), value=[ref_id, ref_id*2])
     merge_entities(recA, recB, merge_id_with_resolved_entity=True)
-    assert recA.get_property(rtname).value == [ref_id, ref_id*2]
-    assert recA.get_property(rtname).value == recB.get_property(rtname).value
+    assert recA.get_property(rtname).value == [ref_rec, ref_id*2]
+    assert recB.get_property(rtname).value == [ref_id, ref_id*2]
+
+
+def test_describe_diff():
+    recA = db.Record()
+    recA.add_property(name="propA", value=2)
+    recA.add_property(name="propB", value=2)
+    recA.add_property(name="propD", value=-273, unit="K")
+
+    recB = db.Record()
+    recB.add_property(name="propA", value=2)
+    recB.add_property(name="propB", value=12)
+    recB.add_property(name="propC", value="cool 17")
+    recB.add_property(name="propD", value=-273, unit="°C")
+
+    diff = compare_entities(recA, recB)
+    diffout = describe_diff(*diff)
+
+    assert diffout.startswith("## Difference between the first version and the second version of None")
+
+    # The output of describe_diff is currently not ordered (e.g. by property name),
+    # so we cannot simply compare it against a fixed output string.
+
+    assert "it does not exist in the first version:" in diffout
+    assert "first version: {'value': 2}" in diffout
+    assert "second version: {'value': 12}" in diffout
+
+    assert "first version: {'unit': 'K'}" in diffout
+    assert "second version: {'unit': '°C'}" in diffout
+
+    diffout = describe_diff(*diff, name="Entity")
+    assert diffout.startswith("## Difference between the first version and the second version of Entity")
+
+    diffout = describe_diff(*diff, label_e0="recA", label_e1="recB")
+    assert "recA: {'value': 2}" in diffout
+    assert "recB: {'value': 12}" in diffout
+
+    assert "recA: {'unit': 'K'}" in diffout
+    assert "recB: {'unit': '°C'}" in diffout
+
+    assert "it does not exist in the recA:" in diffout
+
+    assert "first" not in diffout
+    assert "second" not in diffout
+
+
+def test_diff_without_names():
+    """Test compare_entities in case of properties and parents with
+    ids and without names
+    (cf. https://gitlab.com/linkahead/linkahead-pylib/-/issues/119).
+
+    """
+
+    r1 = db.Record(name="Test").add_parent(name="TestType")
+    r2 = db.Record(name="Test").add_parent(name="TestType")
+    r2.add_property(id=123, value="Test")
+
+    diff1, diff2 = compare_entities(r1, r2)
+    assert len(diff1["properties"]) == 0
+    assert len(diff2["properties"]) == 1
+    assert 123 in diff2["properties"]
+    assert None not in diff2["properties"]
+
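+    # Parents that carry only ids, no names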
+    r3 = db.Record().add_parent(id=101)
+    r4 = db.Record().add_parent(id=102)
+    diff3, diff4 = compare_entities(r3, r4)
+    assert len(diff3["parents"]) == 1
+    assert 101 in diff3["parents"]
+    assert None not in diff3["parents"]
+    assert len(diff4["parents"]) == 1
+    assert 102 in diff4["parents"]
+    assert None not in diff3["parents"]
diff --git a/unittests/test_authentication_auth_token.py b/unittests/test_authentication_auth_token.py
index 3142f1f9f54230cb19666eeb8ff5809a906f9d49..4eb17bcc3892a0d0cad0f2c86289c2e8c625d426 100644
--- a/unittests/test_authentication_auth_token.py
+++ b/unittests/test_authentication_auth_token.py
@@ -96,6 +96,6 @@ def test_logout_calls_delete():
                              auth_token="[request token]",
                              implementation=MockUpServerConnection)
 
-    c._delegate_connection.resources.append(logout_resource)
+    c._delegate_connection.resources.insert(1, logout_resource)
     c._logout()
     mock.method.assert_called_once()
diff --git a/unittests/test_configs/pylinkahead-timeout1.ini b/unittests/test_configs/pylinkahead-timeout1.ini
new file mode 100644
index 0000000000000000000000000000000000000000..d9f894bfeba4f98ed30d96d8c29e057b5a1e643a
--- /dev/null
+++ b/unittests/test_configs/pylinkahead-timeout1.ini
@@ -0,0 +1,4 @@
+[Connection]
+url=https://localhost:10443/
+password_method = unauthenticated
+timeout = None
diff --git a/unittests/test_configs/pylinkahead-timeout2.ini b/unittests/test_configs/pylinkahead-timeout2.ini
new file mode 100644
index 0000000000000000000000000000000000000000..b3d3796f82148459efb8e19344fe11af9e7934ec
--- /dev/null
+++ b/unittests/test_configs/pylinkahead-timeout2.ini
@@ -0,0 +1,4 @@
+[Connection]
+url=https://localhost:10443/
+password_method = unauthenticated
+timeout = (1,20)
diff --git a/unittests/test_configuration.py b/unittests/test_configuration.py
index 40506e878b18473587da8b694d9381c15bdbd860..772e872c08e0a7c4aae3feffdb58244f6ad0c849 100644
--- a/unittests/test_configuration.py
+++ b/unittests/test_configuration.py
@@ -24,6 +24,7 @@
 
 from os import environ, getcwd, remove
 from os.path import expanduser, isfile, join
+from pathlib import Path
 
 import linkahead as db
 import pytest
@@ -45,24 +46,39 @@ def temp_ini_files():
         remove("pylinkahead.ini")
     if created_temp_ini_home:
         remove(expanduser("~/.pylinkahead.ini"))
-    environ["PYCAOSDBINI"] = "~/.pylinkahead.ini"
+    environ["PYLINKAHEADINI"] = "~/.pylinkahead.ini"
 
 
 def test_config_ini_via_envvar(temp_ini_files):
 
     with raises(KeyError):
-        environ["PYCAOSDBINI"]
+        environ["PYLINKAHEADINI"]
 
-    environ["PYCAOSDBINI"] = "bla bla"
-    assert environ["PYCAOSDBINI"] == "bla bla"
+    environ["PYLINKAHEADINI"] = "bla bla"
+    assert environ["PYLINKAHEADINI"] == "bla bla"
     # test wrong configuration file in envvar
     with pytest.raises(RuntimeError):
         db.configuration._read_config_files()
     # test good configuration file in envvar
-    environ["PYCAOSDBINI"] = "~/.pylinkahead.ini"
+    environ["PYLINKAHEADINI"] = "~/.pylinkahead.ini"
     assert expanduser("~/.pylinkahead.ini") in db.configuration._read_config_files()
     # test without envvar
-    environ.pop("PYCAOSDBINI")
+    environ.pop("PYLINKAHEADINI")
     assert expanduser("~/.pylinkahead.ini") in db.configuration._read_config_files()
     # test configuration file in cwd
     assert join(getcwd(), "pylinkahead.ini") in db.configuration._read_config_files()
+
+
+def test_config_timeout_option():
+    expected_results = [None, (1, 20)]
+    # Iterate through timeout test configs
+    test_configs = Path(__file__).parent/'test_configs'
+    for test_config in test_configs.rglob('pylinkahead-timeout*.ini'):
+        # Test that test configs can be parsed
+        db.configure(str(test_config))
+        dct = db.configuration.config_to_yaml(db.get_config())
+        # Test that resulting dict has correct content for timeout
+        assert 'Connection' in dct
+        assert 'timeout' in dct['Connection']
+        assert dct['Connection']['timeout'] in expected_results
+        expected_results.remove(dct['Connection']['timeout'])
diff --git a/unittests/test_connection.py b/unittests/test_connection.py
index a3a1eff705c64f59baec33088906bdd9a4daa14d..5d22efa46e3a6c10452085d735d1bd6f056a81fc 100644
--- a/unittests/test_connection.py
+++ b/unittests/test_connection.py
@@ -25,14 +25,18 @@
 # pylint: disable=missing-docstring
 from __future__ import print_function, unicode_literals
 
+import io
 import re
 from builtins import bytes, str  # pylint: disable=redefined-builtin
 
+import requests
+
 from linkahead import execute_query
 from linkahead.configuration import _reset_config, get_config
 from linkahead.connection.authentication.interface import CredentialsAuthenticator
 from linkahead.connection.connection import (CaosDBServerConnection,
                                              _DefaultCaosDBServerConnection,
+                                             _WrappedHTTPResponse,
                                              configure_connection)
 from linkahead.connection.mockup import (MockUpResponse, MockUpServerConnection,
                                          _request_log_message)
@@ -216,9 +220,9 @@ def test_init_connection():
 def test_resources_list():
     connection = test_init_connection()
     assert hasattr(connection, "resources")
-    assert len(connection.resources) == 1
-    connection.resources.append(lambda **kwargs: test_init_response())
     assert len(connection.resources) == 2
+    connection.resources.append(lambda **kwargs: test_init_response())
+    assert len(connection.resources) == 3
 
     return connection
 
@@ -324,3 +328,51 @@ def test_auth_token_connection():
                                 "auth_token authenticator cannot log in "
                                 "again. You must provide a new authentication "
                                 "token.")
+
+
+def test_buffer_read():
+    """Test the buffering in _WrappedHTTPResponse.read()"""
+
+    class MockResponse(requests.Response):
+        def __init__(self, content: bytes):
+            """A mock response
+
+            Parameters
+            ----------
+            content : bytes
+              The fake content.
+            """
+            super().__init__()
+            self._content = content
+            bio = io.BytesIO(content)
+            self.raw = bio
+
+    expected = b"This response."
+    MockResponse(expected)
+
+    #############################
+    # Check for some exceptions #
+    #############################
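+    # read() without an explicit size after a partial read is expected to raise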
+    resp = _WrappedHTTPResponse(response=MockResponse(expected))
+    with raises(BufferError) as rte:
+        resp.read(4)
+        resp.read()
+    assert "`size` parameter can not be None" in str(rte.value)
+
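+    # Likewise, read(0) after a partial read is expected to raise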
+    resp = _WrappedHTTPResponse(response=MockResponse(expected))
+    with raises(BufferError) as rte:
+        resp.read(4)
+        resp.read(0)
+    assert "`size` parameter can not be None" in str(rte.value)
+
+    print("---")
+    resp = _WrappedHTTPResponse(response=MockResponse(expected))
+    result = (
+        resp.read(4)
+        + resp.read(2)
+        + resp.read(2)  # This line failed before.
+        + resp.read(4)  # Reading the rest in two chunks, because of current limitations in read().
+        + resp.read(2)
+    )
+
+    assert result == expected
diff --git a/unittests/test_container.py b/unittests/test_container.py
index c3a60140d43383c81f03c38c9dd5cc7779bc77ba..9df40ffbbdd62b93453058993dbe64bcf3028fb5 100644
--- a/unittests/test_container.py
+++ b/unittests/test_container.py
@@ -70,7 +70,8 @@ def test_get_property_values():
                                           )
     assert len(table) == 2
     house_row = table[0]
-    assert house_row == (house.name, 40.2, "ft", window.id, None, None, None, 20.5, 20.5, "m", owner.name)
+    assert house_row == (house.name, 40.2, "ft", window.id, None, None, None, 20.5, 20.5, "m",
+                         owner.name)
 
     owner_row = table[1]
     assert owner_row == (owner.name, None, None, None, None, None, None, None, None, None, None)
@@ -199,3 +200,15 @@ def test_container_slicing():
 
     with pytest.raises(TypeError):
         cont[[0, 2, 3]]
+
+
+def test_container_filter():
+    # This is a very rudimentary test since filter_by_identity is based on
+    # _filter_entity_list_by_identity, which is tested separately.
+    cont = db.Container()
+    cont.extend([db.Record(name=f"TestRec{ii+1}") for ii in range(5)])
+
+    recs = cont.filter_by_identity(name="TestRec2")
+    assert len(recs) == 1
+    recs[0].name == "TestRec2"
diff --git a/unittests/test_entity.py b/unittests/test_entity.py
index abf82f0a9b557cf9d1d2365e01fedaa4eae0c565..f2164d9680471e0ed52b47943f0108ef7e4ce60f 100644
--- a/unittests/test_entity.py
+++ b/unittests/test_entity.py
@@ -22,14 +22,20 @@
 # ** end header
 #
 """Tests for the Entity class."""
+import os
 # pylint: disable=missing-docstring
 import unittest
-from lxml import etree
 
-import os
-from linkahead import (INTEGER, Entity, Property, Record, RecordType,
+import linkahead
+from linkahead import (INTEGER, Entity, Parent, Property, Record, RecordType,
                        configure_connection)
+import warnings
+from linkahead.common.models import (SPECIAL_ATTRIBUTES, get_id_from_versionid,
+                                     value_matches_versionid)
+from linkahead.common.versioning import Version
 from linkahead.connection.mockup import MockUpServerConnection
+from lxml import etree
+from pytest import raises
 
 UNITTESTDIR = os.path.dirname(os.path.abspath(__file__))
 
@@ -82,7 +88,13 @@ class TestEntity(unittest.TestCase):
         self.assertEqual(entity.to_xml().tag, "Property")
 
     def test_instantiation(self):
-        self.assertRaises(Exception, Entity())
+        e = Entity()
+        for attr in SPECIAL_ATTRIBUTES:
+            assert hasattr(e, attr)
+
+    def test_instantiation_bad_argument(self):
+        with self.assertRaises(Exception):
+            Entity(rol="File")
 
     def test_parse_role(self):
         """During parsing, the role of an entity is set explicitely. All other
@@ -97,3 +109,211 @@ class TestEntity(unittest.TestCase):
         # test whether the __role property of this object has explicitely been
         # set.
         self.assertEqual(getattr(entity, "_Entity__role"), "Record")
+
+
+def test_parent_list():
+    p1 = RecordType(name="A")
+    pl = linkahead.common.models.ParentList([p1])
+    assert p1 in pl
+    assert pl.index(p1) == 0
+    assert RecordType(name="A") not in pl
+    assert RecordType(id=101) not in pl
+    p2 = RecordType(id=101)
+    pl.append(p2)
+    assert p2 in pl
+    assert len(pl) == 2
+    p3 = RecordType(id=103, name='B')
+    pl.append(p3)
+    assert len(pl) == 3
+
+    # test removal
+    # remove by id only, even though element in parent list has name and id
+    pl.remove(RecordType(id=103))
+    assert len(pl) == 2
+    assert p3 not in pl
+    assert p2 in pl
+    assert p1 in pl
+    # Same for removal by name
+    pl.append(p3)
+    assert len(pl) == 3
+    pl.remove(RecordType(name='B'))
+    assert len(pl) == 2
+    assert p3 not in pl
+    # And an error if no suitable element can be found
+    with raises(KeyError) as ve:
+        pl.remove(RecordType(id=105, name='B'))
+    assert "not found" in str(ve.value)
+    assert len(pl) == 2
+
+    # TODO also check pl1 == pl2
+
+
+def test_property_list():
+    # TODO: Resolve parent-list TODOs, then transfer to here.
+    # TODO: What other considerations have to be done with properties?
+    p1 = Property(name="A")
+    pl = linkahead.common.models.PropertyList()
+    pl.append(p1)
+    assert p1 in pl
+    assert Property(id=101) not in pl
+    p2 = Property(id=101)
+    pl.append(p2)
+    assert p1 in pl
+    assert p2 in pl
+    p3 = Property(id=103, name='B')
+    pl.append(p3)
+
+
+def test_filter_by_identity():
+    rt1 = RecordType(id=100)
+    rt2 = RecordType(id=101, name="RT")
+    rt3 = RecordType(name="")
+    p1 = Property(id=100)
+    p2 = Property(id=100)
+    p3 = Property(id=101, name="RT")
+    p4 = Property(id=102, name="P")
+    p5 = Property(id=103, name="P")
+    p6 = Property(name="")
+    r1 = Record(id=100)
+    r2 = Record(id=100)
+    r3 = Record(id=101, name="RT")
+    r4 = Record(id=101, name="R")
+    r5 = Record(id=104, name="R")
+    r6 = Record(id=105, name="R")
+    test_ents = [rt1, rt2, rt3, p1, p2, p3, p4, p5, p6, r1, r2, r3, r4, r5, r6]
+
+    # Setup
+    for entity in [Property(name=""), Record(name=""), RecordType(name="")]:
+        for coll in [entity.properties, entity.parents]:
+            for ent in test_ents:
+                assert ent not in coll
+                assert ent not in coll.filter_by_identity(ent)
+
+        # Checks with each type
+        t, t_props, t_pars = entity, entity.properties, entity.parents
+        # Properties
+        # Basic Checks
+        t.add_property(p1)
+        tp1 = t.properties[-1]
+        t.add_property(p3)
+        tp3 = t.properties[-1]
+        assert len(t_props.filter_by_identity(pid=100)) == 1
+        assert tp1 in t_props.filter_by_identity(pid=100)
+        assert len(t_props.filter_by_identity(pid="100")) == 1
+        assert tp1 in t_props.filter_by_identity(pid="100")
+        assert len(t_props.filter_by_identity(pid=101, name="RT")) == 1
+        assert tp3 in t_props.filter_by_identity(pid=101, name="RT")
+        for other in [rt1, p2, r1, r2]:
+            assert other not in t_props.filter_by_identity(pid=100)
+            assert tp1 in t_props.filter_by_identity(other)
+        # Check that direct addition (not wrapped) works
+        t_props.append(p2)
+        tp2 = t_props[-1]
+        assert tp2 in t_props.filter_by_identity(pid=100)
+        assert tp2 not in t_props.filter_by_identity(pid=101, name="RT")
+        for other in [rt1, r1, r2]:
+            assert other not in t_props.filter_by_identity(pid=100)
+            assert tp2 in t_props.filter_by_identity(other)
+
+        # Parents
+        # Filtering with both name and id
+        t.add_parent(r3)
+        tr3 = t.parents[-1]
+        t.add_parent(r5)
+        tr5 = t.parents[-1]
+        assert tr3 in t_pars.filter_by_identity(pid=101)
+        assert tr5 not in t_pars.filter_by_identity(pid=101)
+        assert tr3 not in t_pars.filter_by_identity(name="R")
+        assert tr5 in t_pars.filter_by_identity(name="R")
+        assert tr3 in t_pars.filter_by_identity(pid=101, name="R")
+        assert tr5 not in t_pars.filter_by_identity(pid=101, name="R")
+        assert tr3 not in t_pars.filter_by_identity(pid=104, name="RT")
+        assert tr5 in t_pars.filter_by_identity(pid=104, name="RT")
+        assert tr3 not in t_pars.filter_by_identity(pid=105, name="T")
+        assert tr5 not in t_pars.filter_by_identity(pid=105, name="T")
+        # Works also without id / name and with duplicate parents
+        for ent in test_ents:
+            t.add_parent(ent)
+        for ent in t_pars:
+            assert ent in t_pars.filter_by_identity(ent)
+
+    # Grid-based: every combination of (no id / id=27 / id=43)
+    # with (no name / name="A" / name="B")
+    r7 = Record()
+    r7.add_property(Property()).add_property(name="A").add_property(name="B")
+    r7.add_property(id=27).add_property(id=27, name="A").add_property(id=27, name="B")
+    r7.add_property(id=43).add_property(id=43, name="A").add_property(id=43, name="B")
+    assert len(r7.properties.filter_by_identity(pid=27)) == 3
+    assert len(r7.properties.filter_by_identity(pid=43)) == 3
+    assert len(r7.properties.filter_by_identity(pid=43, conjunction=True)) == 3
+    assert len(r7.properties.filter_by_identity(name="A")) == 3
+    assert len(r7.properties.filter_by_identity(name="B")) == 3
+    assert len(r7.properties.filter_by_identity(name="B", conjunction=True)) == 3
+    assert len(r7.properties.filter_by_identity(pid=1, name="A")) == 1
+    assert len(r7.properties.filter_by_identity(pid=1, name="A", conjunction=True)) == 0
+    assert len(r7.properties.filter_by_identity(pid=27, name="B")) == 4
+    assert len(r7.properties.filter_by_identity(pid=27, name="B", conjunction=True)) == 1
+    assert len(r7.properties.filter_by_identity(pid=27, name="C")) == 3
+    assert len(r7.properties.filter_by_identity(pid=27, name="C", conjunction=True)) == 0
+    # Entity based filtering behaves the same
+    assert (r7.properties.filter_by_identity(pid=27) ==
+            r7.properties.filter_by_identity(Property(id=27)))
+    assert (r7.properties.filter_by_identity(pid=43, conjunction=True) ==
+            r7.properties.filter_by_identity(Property(id=43), conjunction=True))
+    assert (r7.properties.filter_by_identity(name="A") ==
+            r7.properties.filter_by_identity(Property(name="A")))
+    assert (r7.properties.filter_by_identity(name="B") ==
+            r7.properties.filter_by_identity(Property(name="B")))
+    assert (r7.properties.filter_by_identity(name="B", conjunction=True) ==
+            r7.properties.filter_by_identity(Property(name="B"), conjunction=True))
+    assert (r7.properties.filter_by_identity(pid=1, name="A") ==
+            r7.properties.filter_by_identity(Property(id=1, name="A")))
+    assert (r7.properties.filter_by_identity(pid=1, name="A", conjunction=True) ==
+            r7.properties.filter_by_identity(Property(id=1, name="A"), conjunction=True))
+    assert (r7.properties.filter_by_identity(pid=27, name="B") ==
+            r7.properties.filter_by_identity(Property(id=27, name="B")))
+    assert (r7.properties.filter_by_identity(pid=27, name="B", conjunction=True) ==
+            r7.properties.filter_by_identity(Property(id=27, name="B"), conjunction=True))
+    assert (r7.properties.filter_by_identity(pid=27, name="C") ==
+            r7.properties.filter_by_identity(Property(id=27, name="C")))
+    assert (r7.properties.filter_by_identity(pid=27, name="C", conjunction=True) ==
+            r7.properties.filter_by_identity(Property(id=27, name="C"), conjunction=True))
+    # Name only matching and name overwrite
+    r8 = Record().add_property(name="A").add_property(name="B").add_property(name="B")
+    r8.add_property(Property(name="A"), name="B")
+    r8.add_property(Property(name="A", id=12), name="C")
+    assert len(r8.properties.filter_by_identity(name="A")) == 1
+    assert len(r8.properties.filter_by_identity(name="B")) == 3
+    assert len(r8.properties.filter_by_identity(name="C")) == 1
+    assert len(r8.properties.filter_by_identity(pid=12)) == 1
+
+    with warnings.catch_warnings(record=True) as w:
+        # Cause all warnings to always be triggered.
+        warnings.simplefilter("always")
+
+        r7.properties.filter(pid=34)
+        assert issubclass(w[-1].category, DeprecationWarning)
+        assert "This function was renamed" in str(w[-1].message)
+
+        t.parents.filter(pid=234)
+        assert issubclass(w[-1].category, DeprecationWarning)
+        assert "This function was renamed" in str(w[-1].message)
+
+
+def test_value_matches_versionid():
+    assert value_matches_versionid(234) is False, "integer is no version id"
+    assert value_matches_versionid("234") is False, ("string that only contains an integer is no "
+                                                     "version id")
+    assert value_matches_versionid("234@bfe1a42cb37aae8ac625a757715d38814c274158") is True, (
+        "integer is no version id") is True
+    with raises(ValueError):
+        value_matches_versionid(234.0)
+
+
+def test_get_id_from_versionid():
+    assert get_id_from_versionid("234@bfe1a42cb37aae8ac625a757715d38814c274158") == "234"
+
+
+def test_get_versionid():
+    e = Entity(id=234)
+    e.version = Version(id="bfe1a42cb37aae8ac625a757715d38814c274158")
+    assert e.get_versionid() == "234@bfe1a42cb37aae8ac625a757715d38814c274158"
diff --git a/unittests/test_error_handling.py b/unittests/test_error_handling.py
index 3f5241466e9a8f810b581cbb587e17ccf8f123ee..64f743c85e9df554e7428cf7d8477e8c823a9758 100644
--- a/unittests/test_error_handling.py
+++ b/unittests/test_error_handling.py
@@ -30,7 +30,7 @@ import linkahead as db
 from linkahead.common.models import raise_errors
 from linkahead.exceptions import (AuthorizationError,
                                   EntityDoesNotExistError, EntityError,
-                                  EntityHasNoDatatypeError,
+                                  EntityHasNoDatatypeError, HTTPServerError,
                                   TransactionError, UniqueNamesError,
                                   UnqualifiedParentsError,
                                   UnqualifiedPropertiesError)
@@ -315,3 +315,26 @@ def test_container_with_faulty_elements():
             # record raises both of them
             assert (isinstance(err, UnqualifiedParentsError) or
                     isinstance(err, UnqualifiedPropertiesError))
+
+
+def test_incomplete_server_error_response():
+    """The reason behind https://gitlab.com/linkahead/linkahead-pylib/-/issues/87."""
+    # Case 1: Response is no XML at all
+    err = HTTPServerError("Bla")
+    assert str(err) == "Bla"
+
+    # Case 2: Response is an incomplete XML document, e.g. due to a very unlucky timeout
+    err = HTTPServerError("<incomplete>XML</inc")
+    assert str(err) == "<incomplete>XML</inc"
+
+    # Case 3: Response is complete XML but doesn't contain response and/or error information
+    err = HTTPServerError("<complete>XML</complete>")
+    assert str(err) == "<complete>XML</complete>"
+
+    # Case 4: Response is an XML response but the error is lacking a description
+    err = HTTPServerError("<Response><Error>complete error</Error></Response>")
+    assert str(err) == "complete error"
+
+    # Case 5: Healthy error Response
+    err = HTTPServerError("<Response><Error description='Error'>complete error</Error></Response>")
+    assert str(err) == "Error\n\ncomplete error"
diff --git a/unittests/test_issues.py b/unittests/test_issues.py
index 7472f710cea32c1d76f11e52fe7c3c3617804c3c..3b0117b28c1300ea1eb0919fce02e3881c2ab025 100644
--- a/unittests/test_issues.py
+++ b/unittests/test_issues.py
@@ -24,7 +24,9 @@ import os
 import lxml
 import linkahead as db
 
+from datetime import date, datetime
 from pytest import raises
+from linkahead.common.utils import xml2str
 
 
 def test_issue_100():
@@ -64,3 +66,65 @@ def test_issue_156():
     # </ParentList>
     assert value is project
     assert parents[0].name == "RTName"
+
+
+def test_issue_128():
+    """Test assigning datetime.date(time) values to DATETIME
+    properties:
+    https://gitlab.com/linkahead/linkahead-pylib/-/issues/128.
+
+    """
+    # Test correct assignment for both datatype=DATETIME
+    # and datatype=LIST<DATETIME>, just to be sure.
+    prop = db.Property(name="TestDatetime", datatype=db.DATETIME)
+    prop_list = db.Property(name="TestListDatetime", datatype=db.LIST(db.DATETIME))
+
+    today = date.today()
+    now = datetime.now()
+
+    prop.value = today
+    assert prop.value == today
+    prop.value = now
+    assert prop.value == now
+
+    prop_list.value = [today, today]
+    assert prop_list.value == [today, today]
+    prop_list.value = [now, now]
+    assert prop_list.value == [now, now]
+
+
+def test_issue_73():
+    """
+    Test to_xml infinite recursion handling with cross- and self-references.
+    https://gitlab.com/linkahead/linkahead-pylib/-/issues/73
+    """
+    # Cross-reference in the property values
+    rt = db.RecordType(name="RT")
+    recA = db.Record().add_parent(rt)
+    recB = db.Record().add_parent(rt)
+    recA.add_property(name="RT", value=recB)
+    recB.add_property(name="RT", value=recA)
+    xml_str = xml2str(recB.to_xml())
+    assert "<Parent name=\"RT" in xml_str
+    assert "<Property name=\"RT" in xml_str
+    assert "Recursive reference" in xml_str
+    assert len(xml_str) < 500
+
+    # Cross-reference in the properties themselves
+    prop1 = db.Property(name="Prop1")
+    prop2 = db.Property(name="Prop2")
+    prop1.add_property(prop2)
+    prop2.add_property(prop1)
+    xml_str = xml2str(prop2.to_xml())
+    assert "<Property name=\"Prop1" in xml_str
+    assert "<Property name=\"Prop2" in xml_str
+    assert "Recursive reference" in xml_str
+    assert len(xml_str) < 500
+
+    # Self-reference in the datatype
+    prop = db.Property()
+    prop.datatype = prop
+    xml_str = xml2str(prop.to_xml())
+    assert "datatype=" in xml_str
+    assert "Recursive reference" in xml_str
+    assert len(xml_str) < 500