diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 8845e4070c685230a99958fbebd9377238df32de..a773c6776b5224dc482bc104fe490e98b9e19eb5 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -57,22 +57,10 @@ mypy:
   tags: [ docker ]
   stage: linting
   script:
-    - pip install mypy types-PyYAML types-jsonschema types-requests types-setuptools types-lxml types-python-dateutil pytest
+    - pip install .[mypy,test]
     - make mypy
-  allow_failure: true
 
 # run unit tests
-unittest_py3.8:
-  tags: [ docker ]
-  stage: test
-  needs: [ ]
-  image: python:3.8
-  script: &python_test_script
-    # Python docker has problems with tox and pip so use plain pytest here
-    - touch ~/.pylinkahead.ini
-    - pip install pynose pytest pytest-cov jsonschema>=4.4.0 setuptools
-    - pip install .
-    - python -m pytest unittests
 
 # This needs to be changed once Python 3.9 isn't the standard Python in Debian
 # anymore.
@@ -92,7 +80,11 @@ unittest_py3.10:
   stage: test
   needs: [ ]
   image: python:3.10
-  script: *python_test_script
+  script: &python_test_script
+    # Python docker has problems with tox and pip so use plain pytest here
+    - touch ~/.pylinkahead.ini
+    - pip install .[test]
+    - python -m pytest unittests
 
 unittest_py3.11:
   tags: [ docker ]
@@ -109,13 +101,26 @@ unittest_py3.12:
   script: *python_test_script
 
 unittest_py3.13:
-  allow_failure: true
   tags: [ docker ]
   stage: test
   needs: [ ]
   image: python:3.13
   script: *python_test_script
 
+unittest_py3.14:
+  allow_failure: true   # remove on release
+  tags: [ docker ]
+  stage: test
+  needs: [ ]
+  image: python:3.14-rc
+  script:               # replace by '*python_test_script' on release
+    # Install cargo manually, source its env, and tell PyO3 to accept Python 3.14 as interpreter
+    - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
+    - . "$HOME/.cargo/env"
+    - export PYO3_USE_ABI3_FORWARD_COMPATIBILITY=1
+    # Continue normally
+    - *python_test_script
+
 # Trigger building of server image and integration tests
 trigger_build:
   stage: deploy
@@ -160,7 +165,7 @@ build-testenv:
 pages_prepare: &pages_prepare
   tags: [ cached-dind ]
   stage: deploy
-  needs: [ code_style, pylint, unittest_py3.8, unittest_py3.9, unittest_py3.10 ]
+  needs: [ code_style, pylint, unittest_py3.9, unittest_py3.10 ]
   only:
     refs:
       - /^release-.*$/i
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d77e58424d9932fd8178536c4cf7dda6a697469c..20f2498a09e5599933c18606febf8f160594a3c8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,15 +8,79 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased] ##
 
 ### Added ###
+* Convenience functions `value_matches_versionid`, `get_id_from_versionid`, and `get_versionid`
+
+### Changed ###
+
+### Deprecated ###
+
+### Removed ###
+
+### Fixed ###
+
+### Security ###
+
+### Documentation ###
+
+* [#78](https://gitlab.com/linkahead/linkahead-pylib/-/issues/78) Fix
+  and extend test-registration docstrings.
+
+## [0.17.0] - 2025-01-14 ##
+
+### Added ###
+
+* New setup extra `test` which installs the dependencies for testing.
+* The Container class has a new member function `filter_by_identity`
+  which is based on `_filter_entity_list_by_identity`.
+* The `Entity` properties `_cuid` and `_flags` are now available for read-only access
+  as `cuid` and `flags`, respectively.
+
+### Changed ###
+
+* Renamed the `filter` function of ParentList and PropertyList to
+  `filter_by_identity`.
+
+### Deprecated ###
+
+* `ParentList.filter` and `PropertyList.filter` functions, use
+  `filter_by_identity` instead.
+
+### Removed ###
+
+* Support for Python 3.8
+
+### Fixed ###
+
+* [#73](https://gitlab.com/linkahead/linkahead-pylib/-/issues/73)
+  `Entity.to_xml` now detects potentially infinite recursion and prevents an error
+* [#89](https://gitlab.com/linkahead/linkahead-pylib/-/issues/89)
+  `to_xml` does not add `noscript` or `TransactionBenchmark` tags anymore
+* [#103](https://gitlab.com/linkahead/linkahead-pylib/-/issues/103)
+  `authentication/interface/on_response()` does not overwrite
+  `auth_token` if the new value is `None`
+* [#119](https://gitlab.com/linkahead/linkahead-pylib/-/issues/119)
+  The diff returned by compare_entities now uses id instead of name as
+  key if either property does not have a name
+* [#87](https://gitlab.com/linkahead/linkahead-pylib/-/issues/87)
+  `XMLSyntaxError` messages when parsing (incomplete) responses in
+  case of certain connection timeouts.
+* [#127](https://gitlab.com/linkahead/linkahead-pylib/-/issues/127)
+  pylinkahead.ini now supports None and tuples as values for the `timeout` keyword
+
+## [0.16.0] - 2024-11-13 ##
+
+### Added ###
+
 * `ParentList` and `PropertyList` now have a `filter` function that allows to select a subset of
   the contained elements by ID and/or name.
-
 * Official support for Python 3.13
 * Added arguments to `describe_diff` that allow customizing the labels for the 'old' and the 'new' diffs.
 * Optional `realm` argument for `linkahead_admin.py set_user_password`
   which defaults to `None`, i.e., the server's default realm.
 
 ### Changed ###
+
 * `compare_entities` is now case insensitive with respect to property and
   recordtype names
 * `_ParentList` is now called `ParentList`
@@ -24,21 +88,16 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 * `ParentList.remove` is now case insensitive when a name is used.
 
 ### Deprecated ###
+
 * the use of the arguments `old_entity` and `new_entity` in `compare_entities`
   is now deprecated. Please use `entity0` and `entity1` respectively instead.
 
-### Removed ###
-
 ### Fixed ###
 
-* [gitlab.indiscale.com#200](https://gitlab.indiscale.com/caosdb/src/caosdb-pylib/-/merge_requests/153)
+* [gitlab.indiscale.com#200](https://gitlab.indiscale.com/caosdb/src/caosdb-pylib/-/issues/200)
   ``linkahead_admin.py`` prints reasonable error messages when users
   or roles don't exist.
 
-### Security ###
-
-### Documentation ###
-
 ## [0.15.1] - 2024-08-21 ##
 
 ### Deprecated ###
diff --git a/CITATION.cff b/CITATION.cff
index 3f51bdf839a5e0451f3d3aaf7f128f61b29927fc..bcecc2fdd962f4c581a2d53d5c1a324fb643a4a3 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -20,6 +20,6 @@ authors:
     given-names: Stefan
     orcid: https://orcid.org/0000-0001-7214-8125
 title: CaosDB - Pylib
-version: 0.15.1
+version: 0.17.0
 doi: 10.3390/data4020083
-date-released: 2024-08-21
+date-released: 2025-01-14
diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index e2326b831a71751265c6c2d5a333ccc37145bfa5..e9bd54a1459df22afa307e256625d05e74bdc6a8 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -1,5 +1,5 @@
 * caosdb-server >= 0.12.0
-* Python >= 3.8
+* Python >= 3.9
 * pip >= 20.0.2
 
 Any other dependencies are defined in the setup.py and are being installed via pip
diff --git a/Makefile b/Makefile
index 21ea40ac8a6eb34032aba75c089e278fa354a6f5..7490c5d586c18d9da361f8940a39550779fb5938 100644
--- a/Makefile
+++ b/Makefile
@@ -40,7 +40,7 @@ style:
 .PHONY: style
 
 lint:
-	pylint --unsafe-load-any-extension=y -d all -e E,F src/linkahead/common
+	pylint --unsafe-load-any-extension=y -d all -e E,F src/linkahead
 .PHONY: lint
 
 mypy:
diff --git a/README.md b/README.md
index 193cb8f0cff8ff5cee36a40a78e53f070527e2e0..d630e879e9ff4781bb79b193d0240ef11ae211d2 100644
--- a/README.md
+++ b/README.md
@@ -47,7 +47,7 @@ However, you can also create an issue for it.
 
 * Copyright (C) 2018 Research Group Biomedical Physics, Max Planck Institute
   for Dynamics and Self-Organization Göttingen.
-* Copyright (C) 2020-2022 Indiscale GmbH <info@indiscale.com>
+* Copyright (C) 2020-2025 Indiscale GmbH <info@indiscale.com>
 
 All files in this repository are licensed under a [GNU Affero General Public
 License](LICENCE.md) (version 3 or later).
diff --git a/README_SETUP.md b/README_SETUP.md
index 8a32fbfacb8fd5733c65998b35e52e1c7bbceab1..f4c921382edb26776391590298faed06a5391396 100644
--- a/README_SETUP.md
+++ b/README_SETUP.md
@@ -2,24 +2,44 @@
 
 ## Installation ##
 
-### Requirements ###
+### How to install ###
 
-PyCaosDB needs at least Python 3.8.  Additionally, the following packages are required (they will
-typically be installed automatically):
+First ensure that Python, at least version 3.9, is installed. Should this not be
+the case, you can use the [Installing Python](#installing-python) guide for your OS.
 
-- `lxml`
-- `PyYaml`
-- `PySocks`
+#### Generic installation ####
 
-Optional packages:
-- `keyring`
-- `jsonschema`
+To install this LinkAhead Python client locally, use `pip`/`pip3`:
 
-### How to install ###
+```sh
+pip install linkahead
+```
+
+#### Additional dependencies ####
+
+To run the tests via tox, you also need to install tox: `pip install tox`
+
+To install dependencies used by optional functionality, the following pip extras
+keywords are defined:
+- `test` for testing with pytest
+- `mypy` for mypy and its type stubs
+- `jsonschema` for validating with the LinkAhead JSON schema
+- `keyring` for storing credentials in a system keyring
+
+These extras can be installed using: 
+```sh
+pip install .[KEYWORD]
+```
+A current list of the dependencies installed with this package, as well as those installed via the
+extras keywords, can be found in the `metadata` dictionary in `setup.py`'s `setup_package()`
+method, under the entries `install_requires` and `extras_require`.
+
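+For example, to install both the testing and the mypy tooling in one call (combining the
+extras defined above):
+
+```sh
+pip install .[test,mypy]
+```
+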
+### Installing Python ###
 
 #### Linux ####
 
-Make sure that Python (at least version 3.8) and pip is installed, using your system tools and
+Make sure that Python (at least version 3.9) and pip are installed, using your system tools and
 documentation.
 
 Then open a terminal and continue in the [Generic installation](#generic-installation) section.
@@ -51,34 +71,7 @@ cd /Applications/Python\ 3.9/
 sudo ./Install\ Certificates.command
 ```
 
-After these steps, you may continue with the [Generic
-installation](#generic-installation).
-
-#### Generic installation ####
-
-To install PyCaosDB locally, use `pip3` (also called `pip` on some systems):
-
-```sh
-pip3 install --user caosdb
-```
-
----
-
-Alternatively, obtain the sources from GitLab and install from there (`git` must be installed for
-this option):
-
-```sh
-git clone https://gitlab.com/caosdb/caosdb-pylib
-cd caosdb-pylib
-pip3 install --user .
-```
-
-For installation of optional packages, install with an additional option, e.g. for 
-validating with the caosdb json schema:
-
-```sh
-pip3 install --user .[jsonschema]
-```
+After these steps, you may continue with the [Generic installation](#generic-installation) section.
 
 ## Configuration ##
 
@@ -87,7 +80,7 @@ is described in detail in the [configuration section of the documentation](https
 
 ## Try it out ##
 
-Start Python and check whether the you can access the database. (You will be asked for the
+Start Python and check whether you can access the database. (You will be asked for the
 password):
 
 ```python
@@ -107,6 +100,7 @@ Now would be a good time to continue with the [tutorials](tutorials/index).
 - Run all tests: `tox` or `make unittest`
 - Run a specific test file: e.g. `tox -- unittests/test_schema.py`
 - Run a specific test function: e.g. `tox -- unittests/test_schema.py::test_config_files`
+- Run the tests with plain pytest: `pytest .`
 
 ## Documentation ##
 We use sphinx to create the documentation. Docstrings in the code should comply
@@ -114,13 +108,6 @@ with the Googly style (see link below).
 
 Build documentation in `build/` with `make doc`.
 
-### Requirements ###
-
-- `sphinx`
-- `sphinx-autoapi`
-- `recommonmark`
-- `sphinx_rtd_theme`
-
 ### How to contribute ###
 
 - [Google Style Python Docstrings](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html)
@@ -128,7 +115,7 @@ Build documentation in `build/` with `make doc`.
 - [References to other documentation](https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#role-external)
 
 ### Troubleshooting ###
-If the client is to be executed directly from the `/src` folder, an initial `.\setup.py install --user` must be called.
+If the client is to be executed directly from the `/src` folder, an initial `./setup.py install` must be called.
 
 ## Migration ##
 TODO
diff --git a/setup.py b/setup.py
index 4e136f6d8d915b2a4437c1d42a0af25b85f45c5b..ab8555b858b0004b59e979ac2de7b2b8450073b1 100755
--- a/setup.py
+++ b/setup.py
@@ -48,8 +48,8 @@ from setuptools import find_packages, setup
 
 ISRELEASED = False
 MAJOR = 0
-MINOR = 15
-MICRO = 2
+MINOR = 17
+MICRO = 1
 # Do not tag as pre-release until this commit
 # https://github.com/pypa/packaging/pull/515
 # has made it into a release. Probably we should wait for pypa/packaging>=21.4
@@ -179,7 +179,7 @@ def setup_package():
             "Topic :: Scientific/Engineering :: Information Analysis",
         ],
         packages=find_packages('src'),
-        python_requires='>=3.8',
+        python_requires='>=3.9',
         package_dir={'': 'src'},
         install_requires=['lxml>=4.6.3',
                           "requests[socks]>=2.26",
@@ -187,11 +187,27 @@ def setup_package():
                           'PyYAML>=5.4.1',
                           'future',
                           ],
-        extras_require={'keyring': ['keyring>=13.0.0'],
-                        'jsonschema': ['jsonschema>=4.4.0']},
+        extras_require={
+            "jsonschema": ["jsonschema>=4.4.0"],
+            "keyring": ["keyring>=13.0.0"],
+            "mypy": [
+                "mypy",
+                "types-PyYAML",
+                "types-jsonschema",
+                "types-requests",
+                "types-setuptools",
+                "types-lxml",
+                "types-python-dateutil",
+            ],
+            "test": [
+                "pytest",
+                "pytest-cov",
+                "coverage>=4.4.2",
+                "jsonschema>=4.4.0",
+            ]
+        },
         setup_requires=["pytest-runner>=2.0,<3dev"],
-        tests_require=["pytest", "pytest-cov", "coverage>=4.4.2",
-                       "jsonschema>=4.4.0"],
         package_data={
             'linkahead': ['py.typed', 'cert/indiscale.ca.crt', 'schema-pycaosdb-ini.yml'],
         },
diff --git a/src/doc/conf.py b/src/doc/conf.py
index e5ead4a5015a606bf91f2e0009d7165f995206b5..ce1aa8ff92d18d1ccd0bd408b1ce9e3523d03940 100644
--- a/src/doc/conf.py
+++ b/src/doc/conf.py
@@ -25,14 +25,14 @@ import sphinx_rtd_theme  # noqa: E402
 # -- Project information -----------------------------------------------------
 
 project = 'pylinkahead'
-copyright = '2023, IndiScale GmbH'
+copyright = '2025, IndiScale GmbH'
 author = 'Daniel Hornung'
 
 # The short X.Y version
-version = '0.15.2'
+version = '0.17.1'
 # The full version, including alpha/beta/rc tags
 # release = '0.5.2-rc2'
-release = '0.15.2-dev'
+release = '0.17.1-dev'
 
 
 # -- General configuration ---------------------------------------------------
diff --git a/src/doc/tutorials/complex_data_models.rst b/src/doc/tutorials/complex_data_models.rst
index 168cf3b9f0d6839ed8f78beb01ae24fb9d489e88..52757c320b42f18b4b24ab9b7575e7bd0becc252 100644
--- a/src/doc/tutorials/complex_data_models.rst
+++ b/src/doc/tutorials/complex_data_models.rst
@@ -78,7 +78,7 @@ Examples
 
 
 Finding parents and properties
---------
+------------------------------
 To find a specific parent or property of an Entity, its
 ParentList or PropertyList can be filtered using names, ids, or
 entities. A short example:
@@ -100,29 +100,45 @@ entities. A short example:
    properties = r.properties
 
    # As r only has one property with id 101, this returns a list containing only p1_1
-   properties.filter(pid=101)
+   properties.filter_by_identity(pid=101)
    # Result: [p1_1]
 
    # Filtering with name="Property 1" returns both p1_1 and p1_2, as they share their name
-   properties.filter(name="Property 1")
+   properties.filter_by_identity(name="Property 1")
    # Result: [p1_1, p1_2]
 
    #  If both name and pid are given, matching is based only on pid for all entities that have an id
-   properties.filter(pid="102", name="Other Property")
+   properties.filter_by_identity(pid="102", name="Other Property")
    # Result: [p2_1, p2_2, p2_3]
 
-   # However, filtering with name="Property 1" and id=101 returns both p1_1 and p1_2, because
+   # However, filter_by_identity with name="Property 1" and id=101 returns both p1_1 and p1_2, because
    # p1_2 does not have an id and matches the name
-   properties.filter(pid="101", name="Property 1")
+   properties.filter_by_identity(pid="101", name="Property 1")
    # Result: [p1_1, p1_2]
 
    # We can also filter using an entity, in which case the name and id of the entity are used:
-   properties.filter(pid="102", name="Property 2") == properties.filter(p2_1)
+   properties.filter_by_identity(pid="102", name="Property 2") == properties.filter_by_identity(p2_1)
    # Result: True
 
    # If we only need properties that match both id and name, we can set the parameter
    # conjunction to True:
-   properties.filter(pid="102", name="Property 2", conjunction=True)
+   properties.filter_by_identity(pid="102", name="Property 2", conjunction=True)
    # Result: [p2_1]
 
 The filter function of ParentList works analogously.
+
+Finding entities in a Container
+-------------------------------
+In the same way as described above, Container can be filtered.
+A short example:
+
+.. code-block:: python3
+
+   import linkahead as db
+
+   # Set up a container with two properties
+   p1 = db.Property(id=101, name="Property 1")
+   p2 = db.Property(name="Property 2")
+   c = db.Container().extend([p1, p2])
+   c.filter_by_identity(name="Property 1")
+   # Result: [p1]
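+   # Filtering by id works the same way (an illustrative addition using the
+   # `entity_id` keyword of `Container.filter_by_identity`):
+   c.filter_by_identity(entity_id=101)
+   # Result: [p1]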
diff --git a/src/linkahead/__init__.py b/src/linkahead/__init__.py
index 567748e3b3a58fb73b91f652d82ed10f818d6014..ac8df123c66ee092801e63a427003fbb7478fc0c 100644
--- a/src/linkahead/__init__.py
+++ b/src/linkahead/__init__.py
@@ -55,7 +55,7 @@ from .utils.get_entity import (get_entity_by_id, get_entity_by_name,
                                get_entity_by_path)
 
 try:
-    from .version import version as __version__
+    from .version import version as __version__  # pylint: disable=import-error
 except ModuleNotFoundError:
     version = "uninstalled"
     __version__ = version
diff --git a/src/linkahead/apiutils.py b/src/linkahead/apiutils.py
index 49336aa8db24fba663337185c5c37a346330c4cd..b2a612faea1616c64b7e78575156abccfdb29e61 100644
--- a/src/linkahead/apiutils.py
+++ b/src/linkahead/apiutils.py
@@ -197,13 +197,19 @@ def compare_entities(entity0: Optional[Entity] = None,
     properties and SPECIAL_ATTRIBUTES if they are missing or different from
     their counterparts in the other entity.
 
+    The key used to represent a parent in the parent list or a
+    property in the property dictionary is the entity's name if the
+    name is present for both compared entities, and the id otherwise.
+
     The value of the properties dict for each listed property is again a dict
     detailing the differences between this property and its counterpart.
     The characteristics that are checked to determine whether two properties
     match are the following:
-        - datatype
-        - importance
-        - value
+
+    - datatype
+    - importance
+    - value
+
     If any of these characteristics differ for a property, the respective
     string (datatype, importance, value) is added as a key to the dict of the
     property with its value being the characteristics value,
@@ -224,9 +230,9 @@ def compare_entities(entity0: Optional[Entity] = None,
 
     Params
     ------
-    entity0                : Entity
+    entity0:                    Entity
                                 First entity to be compared.
-    entity1                : Entity
+    entity1:                    Entity
                                 Second entity to be compared.
     compare_referenced_records: bool, default: False
                                 If set to True, values with referenced records
@@ -242,6 +248,7 @@ def compare_entities(entity0: Optional[Entity] = None,
                                 entity and an int or str also checks whether
                                 the int/str matches the name or id of the
                                 entity, so Entity(id=100) == 100 == "100".
+
     """
     # ToDo: Discuss intended behaviour
     # Questions that need clarification:
@@ -275,9 +282,11 @@ def compare_entities(entity0: Optional[Entity] = None,
         if entity1 is not None:
             raise ValueError("You cannot use both entity1 and new_entity")
         entity1 = new_entity
+    assert entity0 is not None
+    assert entity1 is not None
 
-    diff: tuple = ({"properties": {}, "parents": []},
-                   {"properties": {}, "parents": []})
+    diff: tuple[dict[str, Any], dict[str, Any]] = ({"properties": {}, "parents": []},
+                                                   {"properties": {}, "parents": []})
 
     if entity0 is entity1:
         return diff
@@ -375,15 +384,20 @@ def compare_entities(entity0: Optional[Entity] = None,
 
     # compare properties
     for prop in entity0.properties:
-        matching = entity1.properties.filter(name=prop.name, pid=prop.id)
+        # ToDo: Would making id default break anything?
+        key = prop.name if prop.name is not None else prop.id
+        matching = entity1.properties.filter_by_identity(prop)
         if len(matching) == 0:
             # entity1 has prop, entity0 does not
-            diff[0]["properties"][prop.name] = {}
+            diff[0]["properties"][key] = {}
         elif len(matching) == 1:
-            diff[0]["properties"][prop.name] = {}
-            diff[1]["properties"][prop.name] = {}
-            propdiff = (diff[0]["properties"][prop.name],
-                        diff[1]["properties"][prop.name])
+            # It's possible that prop has name and id, but match only has id
+            key = prop.name if (prop.name is not None and
+                                matching[0].name == prop.name) else prop.id
+            diff[0]["properties"][key] = {}
+            diff[1]["properties"][key] = {}
+            propdiff = (diff[0]["properties"][key],
+                        diff[1]["properties"][key])
 
             # We should compare the wrapped properties instead of the
             # wrapping entities if possible:
@@ -415,8 +429,8 @@ def compare_entities(entity0: Optional[Entity] = None,
 
             # in case there is no difference, we remove the dict keys again
             if len(propdiff[0]) == 0 and len(propdiff[1]) == 0:
-                diff[0]["properties"].pop(prop.name)
-                diff[1]["properties"].pop(prop.name)
+                diff[0]["properties"].pop(key)
+                diff[1]["properties"].pop(key)
 
         else:
             raise NotImplementedError(
@@ -424,11 +438,12 @@ def compare_entities(entity0: Optional[Entity] = None,
 
     # we have not yet compared properties that do not exist in entity0
     for prop in entity1.properties:
+        key = prop.name if prop.name is not None else prop.id
         # check how often the property appears in entity0
-        num_prop_in_ent0 = len(entity0.properties.filter(prop))
+        num_prop_in_ent0 = len(entity0.properties.filter_by_identity(prop))
         if num_prop_in_ent0 == 0:
             # property is only present in entity0 - add to diff
-            diff[1]["properties"][prop.name] = {}
+            diff[1]["properties"][key] = {}
         if num_prop_in_ent0 > 1:
             # Check whether the property is present multiple times in entity0
             # and raise error - result would be incorrect
@@ -439,9 +454,10 @@ def compare_entities(entity0: Optional[Entity] = None,
     for index, parents, other_entity in [(0, entity0.parents, entity1),
                                          (1, entity1.parents, entity0)]:
         for parent in parents:
-            matching = other_entity.parents.filter(parent)
+            key = parent.name if parent.name is not None else parent.id
+            matching = other_entity.parents.filter_by_identity(parent)
             if len(matching) == 0:
-                diff[index]["parents"].append(parent.name)
+                diff[index]["parents"].append(key)
                 continue
 
     return diff
@@ -550,9 +566,10 @@ def merge_entities(entity_a: Entity,
     """
 
     # Compare both entities:
-    diff_r1, diff_r2 = compare_entities(entity_a, entity_b,
-                                        entity_name_id_equivalency=merge_id_with_resolved_entity,
-                                        compare_referenced_records=merge_references_with_empty_diffs)
+    diff_r1, diff_r2 = compare_entities(
+        entity_a, entity_b,
+        entity_name_id_equivalency=merge_id_with_resolved_entity,
+        compare_referenced_records=merge_references_with_empty_diffs)
 
     # Go through the comparison and try to apply changes to entity_a:
     for key in diff_r2["parents"]:
diff --git a/src/linkahead/common/administration.py b/src/linkahead/common/administration.py
index 28ef107579fccb689b7337aed65e054cfbf36c05..9d9d4f013f1ad10cd0957cfb9a9e4f2f44bd6102 100644
--- a/src/linkahead/common/administration.py
+++ b/src/linkahead/common/administration.py
@@ -91,7 +91,7 @@ def get_server_properties() -> dict[str, Optional[str]]:
     props: dict[str, Optional[str]] = dict()
 
     for elem in xml.getroot():
-        props[elem.tag] = elem.text
+        props[str(elem.tag)] = None if elem.text is None else str(elem.text)
 
     return props
 
@@ -156,7 +156,10 @@ def generate_password(length: int):
 def _retrieve_user(name: str, realm: Optional[str] = None, **kwargs):
     con = get_connection()
     try:
-        return con._http_request(method="GET", path="User/" + (realm + "/" + name if realm is not None else name), **kwargs).read()
+        return con._http_request(
+            method="GET",
+            path="User/" + (realm + "/" + name if realm is not None else name),
+            **kwargs).read()
     except HTTPForbiddenError as e:
         e.msg = "You are not permitted to retrieve this user."
         raise
@@ -198,7 +201,9 @@ def _update_user(name: str,
     if entity is not None:
         params["entity"] = str(entity)
     try:
-        return con.put_form_data(entity_uri_segment="User/" + (realm + "/" + name if realm is not None else name), params=params, **kwargs).read()
+        return con.put_form_data(entity_uri_segment="User/" + (realm + "/" +
+                                                               name if realm is not None else name),
+                                 params=params, **kwargs).read()
     except HTTPResourceNotFoundError as e:
         e.msg = "User does not exist."
         raise e
@@ -246,7 +251,9 @@ def _insert_user(name: str,
 def _insert_role(name, description, **kwargs):
     con = get_connection()
     try:
-        return con.post_form_data(entity_uri_segment="Role", params={"role_name": name, "role_description": description}, **kwargs).read()
+        return con.post_form_data(entity_uri_segment="Role",
+                                  params={"role_name": name, "role_description": description},
+                                  **kwargs).read()
     except HTTPForbiddenError as e:
         e.msg = "You are not permitted to insert a new role."
         raise
@@ -259,7 +266,9 @@ def _insert_role(name, description, **kwargs):
 def _update_role(name, description, **kwargs):
     con = get_connection()
     try:
-        return con.put_form_data(entity_uri_segment="Role/" + name, params={"role_description": description}, **kwargs).read()
+        return con.put_form_data(entity_uri_segment="Role/" + name,
+                                 params={"role_description": description},
+                                 **kwargs).read()
     except HTTPForbiddenError as e:
         e.msg = "You are not permitted to update this role."
         raise
@@ -301,8 +310,10 @@ def _set_roles(username, roles, realm=None, **kwargs):
     body = xml2str(xml)
     con = get_connection()
     try:
-        body = con._http_request(method="PUT", path="UserRoles/" + (realm + "/" +
-                                                                    username if realm is not None else username), body=body, **kwargs).read()
+        body = con._http_request(method="PUT",
+                                 path="UserRoles/" + (realm + "/" +
+                                                      username if realm is not None else username),
+                                 body=body, **kwargs).read()
     except HTTPForbiddenError as e:
         e.msg = "You are not permitted to set this user's roles."
         raise
@@ -369,7 +380,8 @@ def _set_permissions(role, permission_rules, **kwargs):
     body = xml2str(xml)
     con = get_connection()
     try:
-        return con._http_request(method="PUT", path="PermissionRules/" + role, body=body, **kwargs).read()
+        return con._http_request(method="PUT", path="PermissionRules/" + role, body=body,
+                                 **kwargs).read()
     except HTTPForbiddenError as e:
         e.msg = "You are not permitted to set this role's permissions."
         raise
@@ -381,7 +393,9 @@ def _set_permissions(role, permission_rules, **kwargs):
 def _get_permissions(role, **kwargs):
     con = get_connection()
     try:
-        return PermissionRule._parse_body(con._http_request(method="GET", path="PermissionRules/" + role, **kwargs).read())
+        return PermissionRule._parse_body(con._http_request(method="GET",
+                                                            path="PermissionRules/" + role,
+                                                            **kwargs).read())
     except HTTPForbiddenError as e:
         e.msg = "You are not permitted to retrieve this role's permissions."
         raise
@@ -429,7 +443,8 @@ class PermissionRule():
         if permission is None:
             raise ValueError(f"Permission is missing in PermissionRule xml: {elem}")
         priority = PermissionRule._parse_boolean(elem.get("priority"))
-        return PermissionRule(elem.tag, permission, priority if priority is not None else False)
+        return PermissionRule(str(elem.tag), permission,
+                              priority if priority is not None else False)
 
     @staticmethod
     def _parse_body(body: str):
diff --git a/src/linkahead/common/models.py b/src/linkahead/common/models.py
index fe2be59ad5b042db23cc0a567476bf9aa03d99ad..e5f378560405be5b46aa2c871fc315d70d7d10d7 100644
--- a/src/linkahead/common/models.py
+++ b/src/linkahead/common/models.py
@@ -41,7 +41,6 @@ import warnings
 from builtins import str
 from copy import deepcopy
 from datetime import date, datetime
-from enum import Enum
 from functools import cmp_to_key
 from hashlib import sha512
 from os import listdir
@@ -350,6 +349,15 @@ class Entity:
     def pickup(self, new_pickup):
         self.__pickup = new_pickup
 
+    @property   # getter for _cuid
+    def cuid(self):
+        # Set if None?
+        return self._cuid
+
+    @property   # getter for _flags
+    def flags(self):
+        return self._flags.copy()   # for dict[str, str] a shallow copy is enough
+
     def grant(
         self,
         realm: Optional[str] = None,
@@ -497,6 +505,10 @@ class Entity:
 
         return self
 
+    def get_versionid(self):
+        """Return this entity's version id as the concatenation ``<id>@<version id>``."""
+        return str(self.id) + "@" + str(self.version.id)
+
     def get_importance(self, property):  # @ReservedAssignment
         """Get the importance of a given property regarding this entity."""
 
@@ -946,7 +958,7 @@ class Entity:
 
         Parameters
         ----------
-        all_parents: list 
+        all_parents: list
           The added parents so far.
 
         Returns
@@ -1139,7 +1151,7 @@ class Entity:
         else:
             return getattr(ref, special_selector.lower())
 
-    def get_property_values(self, *selectors):
+    def get_property_values(self, *selectors) -> tuple:
         """ Return a tuple with the values described by the given selectors.
 
         This represents an entity's properties as if it was a row of a table
@@ -1267,6 +1279,7 @@ class Entity:
         xml: Optional[etree._Element] = None,
         add_properties: INHERITANCE = "ALL",
         local_serialization: bool = False,
+        visited_entities: Optional[list] = None
     ) -> etree._Element:
         """Generate an xml representation of this entity. If the parameter xml
         is given, all attributes, parents, properties, and messages of this
@@ -1274,9 +1287,20 @@ class Entity:
 
         Raise an error if xml is not a lxml.etree.Element
 
-        @param xml: an xml element to which all attributes, parents,
-            properties, and messages
-            are to be added.
+        Parameters
+        ----------
+        xml : etree._Element, optional
+            an xml element to which all attributes, parents,
+            properties, and messages are to be added. Default is None.
+        visited_entities : list, optional
+            list of enties that are being printed for recursion check,
+            should never be set manually. Default is None.
+        add_properties : INHERITANCE, optional
+            FIXME: Add documentation for the add_properties
+            parameter. Default is "ALL".
+        local_serialization : bool, optional
+            FIXME: Add documentation for the local_serialization
+            parameter. Default is False.
 
         FIXME: Add documentation for the add_properties parameter.
         FIXME: Add docuemntation for the local_serialization parameter.
@@ -1293,9 +1317,17 @@ class Entity:
             xml = etree.Element(elem_tag)
         assert isinstance(xml, etree._Element)
 
+        if visited_entities is None:
+            visited_entities = []
+        if self in visited_entities:
+            xml.text = xml2str(etree.Comment("Recursive reference"))
+            return xml
+        visited_entities.append(self)
+
         # unwrap wrapped entity
         if self._wrapped_entity is not None:
-            xml = self._wrapped_entity.to_xml(xml, add_properties)
+            xml = self._wrapped_entity.to_xml(xml, add_properties,
+                                              visited_entities=visited_entities.copy())
 
         if self.id is not None:
             xml.set("id", str(self.id))
@@ -1310,6 +1342,10 @@ class Entity:
             xml.set("description", str(self.description))
 
         if self.version is not None:
+            # If this ever causes problems, we might add
+            # visited_entities support here since it does have some
+            # recursion with predecessors / successors. But should be
+            # fine for now, since it is always set by the server.
             xml.append(self.version.to_xml())
 
         if self.value is not None:
@@ -1319,7 +1355,8 @@ class Entity:
                 elif self.value.name is not None:
                     xml.text = str(self.value.name)
                 else:
-                    xml.text = str(self.value)
+                    dt_str = xml2str(self.value.to_xml(visited_entities=visited_entities.copy()))
+                    xml.text = dt_str
             elif isinstance(self.value, list):
                 for v in self.value:
                     v_elem = etree.Element("Value")
@@ -1330,7 +1367,8 @@ class Entity:
                         elif v.name is not None:
                             v_elem.text = str(v.name)
                         else:
-                            v_elem.text = str(v)
+                            dt_str = xml2str(v.to_xml(visited_entities=visited_entities.copy()))
+                            v_elem.text = dt_str
                     elif v == "":
                         v_elem.append(etree.Element("EmptyString"))
                     elif v is None:
@@ -1352,7 +1390,11 @@ class Entity:
                 elif self.datatype.name is not None:
                     xml.set("datatype", str(self.datatype.name))
                 else:
-                    xml.set("datatype", str(self.datatype))
+                    dt_str = xml2str(self.datatype.to_xml(visited_entities=visited_entities.copy()))
+                    # Todo: Use for pretty-printing with calls from _repr_ only?
+                    # dt_str = dt_str.replace('<', 'ᐸ').replace('>', 'ᐳ').replace(' ', '⠀').replace(
+                    # '"', '\'').replace('\n', '')
+                    xml.set("datatype", dt_str)
             else:
                 xml.set("datatype", str(self.datatype))
 
@@ -1375,10 +1417,11 @@ class Entity:
             self.messages.to_xml(xml)
 
         if self.parents is not None:
-            self.parents.to_xml(xml)
+            self.parents.to_xml(xml, visited_entities=visited_entities.copy())
 
         if self.properties is not None:
-            self.properties.to_xml(xml, add_properties)
+            self.properties.to_xml(xml, add_properties,
+                                   visited_entities=visited_entities.copy())
 
         if len(self._flags) > 0:
             flagattr = ""
@@ -1548,7 +1591,7 @@ class Entity:
 
         Parameters
         ----------
-        unique : bool =True 
+        unique : bool =True
            flag to suppress the ambiguity exception.
 
         Returns
@@ -1906,12 +1949,12 @@ class QueryTemplate():
 
     @staticmethod
     def _from_xml(xml: etree._Element):
-        if xml.tag.lower() == "querytemplate":
+        if str(xml.tag).lower() == "querytemplate":
             q = QueryTemplate(name=xml.get("name"),
                               description=xml.get("description"), query=None)
 
             for e in xml:
-                if e.tag.lower() == "query":
+                if str(e.tag).lower() == "query":
                     q.query = e.text
                 else:
                     child = _parse_single_xml_element(e)
@@ -1948,7 +1991,7 @@ class QueryTemplate():
         ret = Messages()
 
         for m in self.messages:
-            if m.type.lower() == "error":
+            if str(m.type).lower() == "error":
                 ret.append(m)
 
         return ret
@@ -1994,11 +2037,16 @@ class Parent(Entity):
         xml: Optional[etree._Element] = None,
         add_properties: INHERITANCE = "NONE",
         local_serialization: bool = False,
+        visited_entities: Optional[list] = None,
     ):
         if xml is None:
             xml = etree.Element("Parent")
 
-        return super().to_xml(xml=xml, add_properties=add_properties)
+        if visited_entities is None:
+            visited_entities = []
+
+        return super().to_xml(xml=xml, add_properties=add_properties,
+                              visited_entities=visited_entities)
 
 
 class _EntityWrapper(object):
@@ -2069,14 +2117,19 @@ class Property(Entity):
         xml: Optional[etree._Element] = None,
         add_properties: INHERITANCE = "ALL",
         local_serialization: bool = False,
+        visited_entities: Optional[list] = None,
     ):
         if xml is None:
             xml = etree.Element("Property")
 
+        if visited_entities is None:
+            visited_entities = []
+
         return super(Property, self).to_xml(
             xml=xml,
             add_properties=add_properties,
             local_serialization=local_serialization,
+            visited_entities=visited_entities,
         )
 
     def is_reference(self, server_retrieval: bool = False) -> Optional[bool]:
@@ -2234,15 +2287,20 @@ class RecordType(Entity):
         xml: Optional[etree._Element] = None,
         add_properties: INHERITANCE = "ALL",
         local_serialization: bool = False,
+        visited_entities: Optional[list] = None,
     ) -> etree._Element:
         if xml is None:
             xml = etree.Element("RecordType")
 
+        if visited_entities is None:
+            visited_entities = []
+
         return Entity.to_xml(
             self,
             xml=xml,
             add_properties=add_properties,
             local_serialization=local_serialization,
+            visited_entities=visited_entities,
         )
 
 
@@ -2273,14 +2331,19 @@ class Record(Entity):
         xml: Optional[etree._Element] = None,
         add_properties: INHERITANCE = "ALL",
         local_serialization: bool = False,
+        visited_entities: Optional[list] = None,
     ):
         if xml is None:
             xml = etree.Element("Record")
 
+        if visited_entities is None:
+            visited_entities = []
+
         return super().to_xml(
             xml=xml,
             add_properties=add_properties,
             local_serialization=local_serialization,
+            visited_entities=visited_entities
         )
 
 
@@ -2361,6 +2424,7 @@ class File(Record):
         xml: Optional[etree._Element] = None,
         add_properties: INHERITANCE = "ALL",
         local_serialization: bool = False,
+        visited_entities: Optional[list] = None,
     ) -> etree._Element:
         """Convert this file to an xml element.
 
@@ -2373,8 +2437,12 @@ class File(Record):
         if xml is None:
             xml = etree.Element("File")
 
+        if visited_entities is None:
+            visited_entities = []
+
         return Entity.to_xml(self, xml=xml, add_properties=add_properties,
-                             local_serialization=local_serialization)
+                             local_serialization=local_serialization,
+                             visited_entities=visited_entities)
 
     def download(self, target: Optional[str] = None) -> str:
         """Download this file-entity's actual file from the file server. It
@@ -2382,7 +2450,7 @@ class File(Record):
 
         Parameters
         ----------
-        target: Optional[str] 
+        target: Optional[str]
             Where to store this file.
 
         Returns
@@ -2475,7 +2543,7 @@ class PropertyList(list):
     This class provides addional functionality like get/set_importance or get_by_name.
     """
 
-    def __init__(self):
+    def __init__(self) -> None:
         super().__init__()
         self._importance: dict[Entity, IMPORTANCE] = dict()
         self._inheritance: dict[Entity, INHERITANCE] = dict()
@@ -2549,15 +2617,20 @@ class PropertyList(list):
 
         return self
 
-    def to_xml(self, add_to_element: etree._Element, add_properties: INHERITANCE):
+    def to_xml(self, add_to_element: etree._Element, add_properties: INHERITANCE,
+               visited_entities: Optional[list] = None):
+
+        if visited_entities is None:
+            visited_entities = []
+
         p: Property
         for p in self:
             importance = self._importance.get(p)
 
             if add_properties == FIX and not importance == FIX:
                 continue
-
-            pelem = p.to_xml(xml=etree.Element("Property"), add_properties=FIX)
+            pelem = p.to_xml(xml=etree.Element("Property"), add_properties=FIX,
+                             visited_entities=visited_entities.copy())
 
             if p in self._importance:
                 pelem.set("importance", str(importance))
@@ -2575,21 +2648,23 @@ class PropertyList(list):
 
         return xml2str(xml)
 
-    def filter(self, prop: Optional[Property] = None,
-               pid: Union[None, str, int] = None,
-               name: Optional[str] = None,
-               conjunction: bool = False) -> list:
+    def filter(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning("This function was renamed to filter_by_identity."))
+        return self.filter_by_identity(*args, **kwargs)
+
+    def filter_by_identity(self, prop: Optional[Property] = None,
+                           pid: Union[None, str, int] = None,
+                           name: Optional[str] = None,
+                           conjunction: bool = False) -> list:
         """
         Return all Properties from the given PropertyList that match the
         selection criteria.
 
-        Please refer to the documentation of _filter_entity_list for a detailed
+        Please refer to the documentation of _filter_entity_list_by_identity for a detailed
         description of behaviour.
 
         Params
         ------
-        listobject        : Iterable(Property)
-                            List to be filtered
         prop              : Property
                             Property to match name and ID with. Cannot be set
                             simultaneously with ID or name.
@@ -2606,8 +2681,8 @@ class PropertyList(list):
         matches          : list
                            List containing all matching Properties
         """
-        return _filter_entity_list(self, pid=pid, name=name, entity=prop,
-                                   conjunction=conjunction)
+        return _filter_entity_list_by_identity(self, pid=pid, name=name, entity=prop,
+                                               conjunction=conjunction)
 
     def _get_entity_by_cuid(self, cuid: str):
         '''
@@ -2615,7 +2690,7 @@ class PropertyList(list):
         Note: this method is intended for internal use.
         Parameters
         ----------
-        name: str 
+        name: str
             The cuid of the entity to be returned.
         Returns
         -------
@@ -2684,7 +2759,7 @@ class ParentList(list):
 
         Returns
         -------
-        Entity 
+        Entity
             Entity with the given cuid.
         '''
 
@@ -2717,7 +2792,12 @@ class ParentList(list):
 
         return self
 
-    def to_xml(self, add_to_element: etree._Element):
+    def to_xml(self, add_to_element: etree._Element,
+               visited_entities: Optional[list] = None):
+
+        if visited_entities is None:
+            visited_entities = []
+
         for p in self:
             pelem = etree.Element("Parent")
 
@@ -2752,15 +2832,19 @@ class ParentList(list):
 
         return xml2str(xml)
 
-    def filter(self, parent: Optional[Parent] = None,
-               pid: Union[None, str, int] = None,
-               name: Optional[str] = None,
-               conjunction: bool = False) -> list:
+    def filter(self, *args, **kwargs):
+        warnings.warn(DeprecationWarning("This function was renamed to filter_by_identity."))
+        return self.filter_by_identity(*args, **kwargs)
+
+    def filter_by_identity(self, parent: Optional[Parent] = None,
+                           pid: Union[None, str, int] = None,
+                           name: Optional[str] = None,
+                           conjunction: bool = False) -> list:
         """
         Return all Parents from the given ParentList that match the selection
         criteria.
 
-        Please refer to the documentation of _filter_entity_list for a detailed
+        Please refer to the documentation of _filter_entity_list_by_identity for a detailed
         description of behaviour.
 
         Params
@@ -2783,8 +2867,8 @@ class ParentList(list):
         matches          : list
                            List containing all matching Parents
         """
-        return _filter_entity_list(self, pid=pid, name=name, entity=parent,
-                                   conjunction=conjunction)
+        return _filter_entity_list_by_identity(self, pid=pid, name=name, entity=parent,
+                                               conjunction=conjunction)
 
     def remove(self, parent: Union[Entity, int, str]):
         """
@@ -2818,7 +2902,7 @@ class ParentList(list):
                     # by name
 
                     for e in self:
-                        if e.name is not None and e.name.lower() == parent.name.lower():
+                        if e.name is not None and str(e.name).lower() == str(parent.name).lower():
                             list.remove(self, e)
 
                             return
@@ -3114,12 +3198,12 @@ def _basic_sync(e_local, e_remote):
     if e_local.role is None:
         e_local.role = e_remote.role
     elif e_remote.role is not None and not e_local.role.lower() == e_remote.role.lower():
-        raise ValueError("The resulting entity had a different role ({0}) "
-                         "than the local one ({1}). This probably means, that "
+        raise ValueError(f"The resulting entity had a different role ({e_remote.role}) "
+                         f"than the local one ({e_local.role}). This probably means, that "
                          "the entity was intialized with a wrong class "
                          "by this client or it has changed in the past and "
-                         "this client did't know about it yet.".format(
-                             e_remote.role, e_local.role))
+                         "this client did't know about it yet.\nThis is the local version of the"
+                         f" Entity:\n{e_local}\nThis is the remote one:\n{e_remote}")
 
     e_local.id = e_remote.id
     e_local.name = e_remote.name
@@ -3290,7 +3374,7 @@ class Container(list):
             Do a case-sensitive search for name (or not).
         Returns
         -------
-        Entity 
+        Entity
             Entity with the given name.
         """
 
@@ -3388,6 +3472,7 @@ class Container(list):
 
         if add_to_element is None:
             add_to_element = etree.Element("Entities")
+        noscript_in_supplied_xml = list(add_to_element.iter("noscript", "TransactionBenchmark"))
 
         for m in self.messages:
             add_to_element.append(m.to_xml())
@@ -3404,6 +3489,13 @@ class Container(list):
                 elem = e.to_xml()
             add_to_element.append(elem)
 
+        # remove noscript and benchmark elements added by this function
+        for elem in list(add_to_element.iter("noscript", "TransactionBenchmark")):
+            if elem not in noscript_in_supplied_xml:
+                parent = elem.getparent()
+                if parent is not None:
+                    parent.remove(elem)
+
         return add_to_element
 
     def get_errors(self):
@@ -3431,7 +3523,7 @@ class Container(list):
 
         Returns
         -------
-        Messages 
+        Messages
             Warning messages.
         """
 
@@ -3463,7 +3555,7 @@ class Container(list):
         '''
         Returns
         -------
-        bool 
+        bool
             True if and only if this container has any warning messages.
         '''
 
@@ -3777,6 +3869,37 @@ class Container(list):
 
         return sync_dict
 
+    def filter_by_identity(self, entity: Optional[Entity] = None,
+                           entity_id: Union[None, str, int] = None,
+                           name: Optional[str] = None,
+                           conjunction: bool = False) -> list:
+        """
+        Return all Entities from this Container that match the selection criteria.
+
+        Please refer to the documentation of _filter_entity_list_by_identity for a detailed
+        description of behaviour.
+
+        Params
+        ------
+        entity            : Entity
+                            Entity to match name and ID with. Cannot be set
+                            simultaneously with ID or name.
+        entity_id         : str, int
+                            Entity ID to match
+        name              : str
+                            Entity name to match
+        conjunction       : bool, defaults to False
+                            Set to return only entities that match both id and name
+                            if both are given.
+
+        Returns
+        -------
+        matches          : list
+                           List containing all matching Entities
+        """
+        return _filter_entity_list_by_identity(self, pid=entity_id, name=name, entity=entity,
+                                               conjunction=conjunction)
+
     @staticmethod
     def _find_dependencies_in_container(container: Container):
         """Find elements in a container that are a dependency of another element of the same.
@@ -4655,7 +4778,7 @@ class ACL():
         return len(self._grants) + len(self._priority_grants) + \
             len(self._priority_denials) + len(self._denials) == 0
 
-    def clear(self):
+    def clear(self) -> None:
         self._grants: set[ACI] = set()
         self._denials: set[ACI] = set()
         self._priority_grants: set[ACI] = set()
@@ -4997,7 +5120,7 @@ class Query():
             self.etag = q.get("etag")
 
             for m in q:
-                if m.tag.lower() == 'warning' or m.tag.lower() == 'error':
+                if str(m.tag).lower() == 'warning' or str(m.tag).lower() == 'error':
                     self.messages.append(_parse_single_xml_element(m))
         else:
             self.q = q
@@ -5204,13 +5327,13 @@ class DropOffBox(list):
         xml = etree.fromstring(body)
 
         for child in xml:
-            if child.tag.lower() == "stats":
+            if str(child.tag).lower() == "stats":
                 infoelem = child
 
                 break
 
         for child in infoelem:
-            if child.tag.lower() == "dropoffbox":
+            if str(child.tag).lower() == "dropoffbox":
                 dropoffboxelem = child
 
                 break
@@ -5259,7 +5382,7 @@ class Info():
 
     """
 
-    def __init__(self):
+    def __init__(self) -> None:
         self.messages = Messages()
         self.user_info: Optional[UserInfo] = None
         self.time_zone: Optional[TimeZone] = None
@@ -5365,7 +5488,7 @@ def parse_xml(xml: Union[str, etree._Element]):
 
     Parameters
     ----------
-    xml: Union[str, etree._Element] 
+    xml: Union[str, etree._Element]
        a string or tree representation of an xml document.
     Returns
     -------
@@ -5391,36 +5514,36 @@ def _parse_single_xml_element(elem: etree._Element):
         "entity": Entity,
     }
 
-    if elem.tag.lower() in classmap:
-        klass = classmap.get(elem.tag.lower())
+    if str(elem.tag).lower() in classmap:
+        klass = classmap.get(str(elem.tag).lower())
         if klass is None:
-            raise LinkAheadException("No class for tag '{}' found.".format(elem.tag))
+            raise LinkAheadException("No class for tag '{}' found.".format(str(elem.tag)))
         entity = klass()
         Entity._from_xml(entity, elem)
 
         return entity
-    elif elem.tag.lower() == "version":
+    elif str(elem.tag).lower() == "version":
         return Version.from_xml(elem)
-    elif elem.tag.lower() == "state":
+    elif str(elem.tag).lower() == "state":
         return State.from_xml(elem)
-    elif elem.tag.lower() == "emptystring":
+    elif str(elem.tag).lower() == "emptystring":
         return ""
-    elif elem.tag.lower() == "value":
-        if len(elem) == 1 and elem[0].tag.lower() == "emptystring":
+    elif str(elem.tag).lower() == "value":
+        if len(elem) == 1 and str(elem[0].tag).lower() == "emptystring":
             return ""
-        elif len(elem) == 1 and elem[0].tag.lower() in classmap:
+        elif len(elem) == 1 and str(elem[0].tag).lower() in classmap:
             return _parse_single_xml_element(elem[0])
         elif elem.text is None or elem.text.strip() == "":
             return None
 
         return str(elem.text.strip())
-    elif elem.tag.lower() == "querytemplate":
+    elif str(elem.tag).lower() == "querytemplate":
         return QueryTemplate._from_xml(elem)
-    elif elem.tag.lower() == 'query':
+    elif str(elem.tag).lower() == 'query':
         return Query(elem)
-    elif elem.tag.lower() == 'history':
+    elif str(elem.tag).lower() == 'history':
         return Message(type='History', description=elem.get("transaction"))
-    elif elem.tag.lower() == 'stats':
+    elif str(elem.tag).lower() == 'stats':
         counts = elem.find("counts")
         if counts is None:
             raise LinkAheadException("'stats' element without a 'count' found.")
@@ -5440,7 +5563,7 @@ def _parse_single_xml_element(elem: etree._Element):
     else:
         code = elem.get("code")
         return Message(
-            type=elem.tag,
+            type=str(elem.tag),
             code=int(code) if code is not None else None,
             description=elem.get("description"),
             body=elem.text,
@@ -5590,11 +5713,11 @@ def delete(ids: Union[list[int], range], raise_exception_on_error: bool = True):
     return c.delete(raise_exception_on_error=raise_exception_on_error)
 
 
-def _filter_entity_list(listobject: list[Entity],
-                        entity: Optional[Entity] = None,
-                        pid: Union[None, str, int] = None,
-                        name: Optional[str] = None,
-                        conjunction: bool = False) -> list:
+def _filter_entity_list_by_identity(listobject: list[Entity],
+                                    entity: Optional[Entity] = None,
+                                    pid: Union[None, str, int] = None,
+                                    name: Optional[str] = None,
+                                    conjunction: bool = False) -> list:
     """
     Returns a subset of entities from the list based on whether their id and
     name matches the selection criterion.
@@ -5676,3 +5799,18 @@ def _filter_entity_list(listobject: list[Entity],
             if pid_none and name_match:
                 matches.append(candidate)
     return matches
+
+
+def value_matches_versionid(value: Union[int, str]):
+    """Returns True if the value matches the pattern <id>@<version>"""
+    if isinstance(value, int):
+        return False
+    if not isinstance(value, str):
+        raise ValueError(f"A reference value needs to be int or str. It was {type(value)}. "
+                         "Did you call value_matches_versionid on a non reference value?")
+    return "@" in value
+
+
+def get_id_from_versionid(versionid: str):
+    """Returns the ID part of the versionid with the pattern <id>@<version>"""
+    return versionid.split("@")[0]
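
A quick usage sketch of the two helpers added above (the version hash is illustrative only); the
unit tests further below import them from linkahead.common.models, which is assumed here as well:

    from linkahead.common.models import get_id_from_versionid, value_matches_versionid

    ref = "234@bfe1a42cb37aae8ac625a757715d38814c274158"
    assert value_matches_versionid(ref) is True    # matches the <id>@<version> pattern
    assert value_matches_versionid(234) is False   # plain integer ids never match
    assert get_id_from_versionid(ref) == "234"     # ID part before the "@"
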
diff --git a/src/linkahead/common/state.py b/src/linkahead/common/state.py
index e352f82d9820620d1692cb6337eb218210e799e6..b708ca13cb0a648aa2ca00507f39a531e4f55d14 100644
--- a/src/linkahead/common/state.py
+++ b/src/linkahead/common/state.py
@@ -20,11 +20,11 @@
 # ** end header
 
 from __future__ import annotations  # Can be removed with 3.10.
-import copy
-from lxml import etree
 
+import copy
 from typing import TYPE_CHECKING
-import sys
+
+from lxml import etree
 
 if TYPE_CHECKING:
     from typing import Optional
@@ -87,7 +87,8 @@ class Transition:
         return self._to_state
 
     def __repr__(self):
-        return f'Transition(name="{self.name}", from_state="{self.from_state}", to_state="{self.to_state}", description="{self.description}")'
+        return (f'Transition(name="{self.name}", from_state="{self.from_state}", '
+                f'to_state="{self.to_state}", description="{self.description}")')
 
     def __eq__(self, other):
         return (
@@ -103,9 +104,9 @@ class Transition:
     @staticmethod
     def from_xml(xml: etree._Element) -> "Transition":
         to_state = [to.get("name")
-                    for to in xml if to.tag.lower() == "tostate"]
+                    for to in xml if str(to.tag).lower() == "tostate"]
         from_state = [
-            from_.get("name") for from_ in xml if from_.tag.lower() == "fromstate"
+            from_.get("name") for from_ in xml if str(from_.tag).lower() == "fromstate"
         ]
         return Transition(
             name=xml.get("name"),
@@ -199,7 +200,7 @@ class State:
         result._id = xml.get("id")
         result._description = xml.get("description")
         transitions = [
-            Transition.from_xml(t) for t in xml if t.tag.lower() == "transition"
+            Transition.from_xml(t) for t in xml if str(t.tag).lower() == "transition"
         ]
         if transitions:
             result._transitions = set(transitions)
diff --git a/src/linkahead/common/versioning.py b/src/linkahead/common/versioning.py
index 11cf5f6904b02954eb0b2bddc16478590df167e7..1c2999df8174e239a470cfc637533c3c8c302c33 100644
--- a/src/linkahead/common/versioning.py
+++ b/src/linkahead/common/versioning.py
@@ -101,11 +101,14 @@ class Version():
     # pylint: disable=redefined-builtin
     def __init__(self, id: Optional[str] = None, date: Optional[str] = None,
                  username: Optional[str] = None, realm: Optional[str] = None,
-                 predecessors: Optional[List[Version]] = None, successors: Optional[List[Version]] = None,
+                 predecessors: Optional[List[Version]] = None,
+                 successors: Optional[List[Version]] = None,
                  is_head: Union[bool, str, None] = False,
                  is_complete_history: Union[bool, str, None] = False):
-        """Typically the `predecessors` or `successors` should not "link back" to an existing Version
-        object."""
+        """Typically the `predecessors` or `successors` should not "link back" to an existing
+        Version object.
+
+        """
         self.id = id
         self.date = date
         self.username = username
@@ -205,8 +208,8 @@ class Version():
         version : Version
             a new version instance
         """
-        predecessors = [Version.from_xml(p) for p in xml if p.tag.lower() == "predecessor"]
-        successors = [Version.from_xml(s) for s in xml if s.tag.lower() == "successor"]
+        predecessors = [Version.from_xml(p) for p in xml if str(p.tag).lower() == "predecessor"]
+        successors = [Version.from_xml(s) for s in xml if str(s.tag).lower() == "successor"]
         return Version(id=xml.get("id"), date=xml.get("date"),
                        is_head=xml.get("head"),
                        is_complete_history=xml.get("completeHistory"),
diff --git a/src/linkahead/configuration.py b/src/linkahead/configuration.py
index f57289d7dcb6d7ab062024dc697dbda557670d7a..5081c28af253d3da31926ab1c9449309cc171c4f 100644
--- a/src/linkahead/configuration.py
+++ b/src/linkahead/configuration.py
@@ -30,6 +30,15 @@ import yaml
 try:
     optional_jsonschema_validate: Optional[Callable] = None
     from jsonschema import validate as optional_jsonschema_validate
+
+    # Adapted from https://github.com/python-jsonschema/jsonschema/issues/148
+    # Defines Validator to allow parsing of all iterables as array in jsonschema
+    # CustomValidator can be removed if/once jsonschema allows tuples for arrays
+    from collections.abc import Iterable
+    from jsonschema import validators
+    default = validators.validator_for(True)   # Returns latest supported draft
+    t_c = (default.TYPE_CHECKER.redefine('array', lambda x, y: isinstance(y, Iterable)))
+    CustomValidator = validators.extend(default, type_checker=t_c)
 except ImportError:
     pass
 
@@ -72,14 +81,40 @@ def get_config() -> ConfigParser:
     return _pycaosdbconf
 
 
-def config_to_yaml(config: ConfigParser) -> dict[str, dict[str, Union[int, str, bool]]]:
-    valobj: dict[str, dict[str, Union[int, str, bool]]] = {}
+def config_to_yaml(config: ConfigParser) -> dict[str, dict[str, Union[int, str, bool, tuple, None]]]:
+    """
+    Generates and returns a dict with all config options and their values
+    as defined in the config.
+    The values of the options 'debug', 'timeout', and 'ssl_insecure' are
+    parsed into their Python types; all other values are kept as strings.
+
+    Parameters
+    ----------
+    config : ConfigParser
+        The config to be converted to a dict
+
+    Returns
+    -------
+    valobj : dict
+        A dict with config options and their values as key value pairs
+    """
+    valobj: dict[str, dict[str, Union[int, str, bool, tuple, None]]] = {}
     for s in config.sections():
         valobj[s] = {}
         for key, value in config[s].items():
             # TODO: Can the type be inferred from the config object?
-            if key in ["timeout", "debug"]:
+            if key in ["debug"]:
                 valobj[s][key] = int(value)
+            elif key in ["timeout"]:
+                value = "".join(value.split())          # Remove whitespace
+                if str(value).lower() in ["none", "null"]:
+                    valobj[s][key] = None
+                elif value.startswith('(') and value.endswith(')'):
+                    content = [None if str(part).lower() in ["none", "null"] else int(part)
+                               for part in value[1:-1].split(',')]
+                    valobj[s][key] = tuple(content)
+                else:
+                    valobj[s][key] = int(value)
             elif key in ["ssl_insecure"]:
                 valobj[s][key] = bool(value)
             else:
@@ -88,11 +123,12 @@ def config_to_yaml(config: ConfigParser) -> dict[str, dict[str, Union[int, str,
     return valobj
 
 
-def validate_yaml_schema(valobj: dict[str, dict[str, Union[int, str, bool]]]):
+def validate_yaml_schema(valobj: dict[str, dict[str, Union[int, str, bool, tuple, None]]]):
     if optional_jsonschema_validate:
         with open(os.path.join(os.path.dirname(__file__), "schema-pycaosdb-ini.yml")) as f:
             schema = yaml.load(f, Loader=yaml.SafeLoader)
-        optional_jsonschema_validate(instance=valobj, schema=schema["schema-pycaosdb-ini"])
+        optional_jsonschema_validate(instance=valobj, schema=schema["schema-pycaosdb-ini"],
+                                     cls=CustomValidator)
     else:
         warnings.warn("""
             Warning: The validation could not be performed because `jsonschema` is not installed.
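
A minimal sketch of what the extended timeout handling in config_to_yaml() above does, assuming the
linkahead package from this repository is installed; the ini values mirror the new
unittests/test_configs files:

    from configparser import ConfigParser
    from linkahead.configuration import config_to_yaml

    conf = ConfigParser()
    conf.read_string("""
    [Connection]
    url = https://localhost:10443/
    timeout = (1, 20)
    """)
    # "(1, 20)" is parsed into a (connect, read) tuple; "None"/"null" would yield None,
    # and a plain integer such as "210" stays an int.
    assert config_to_yaml(conf)["Connection"]["timeout"] == (1, 20)
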
diff --git a/src/linkahead/connection/authentication/interface.py b/src/linkahead/connection/authentication/interface.py
index b48e27c08312bf1358d32a9a1203627a9d0007c2..8288880583dc58fc82ab03d371861f067406b3d3 100644
--- a/src/linkahead/connection/authentication/interface.py
+++ b/src/linkahead/connection/authentication/interface.py
@@ -125,8 +125,9 @@ class AbstractAuthenticator(ABC):
         Returns
         -------
         """
-        self.auth_token = parse_auth_token(
-            response.getheader("Set-Cookie"))
+        new_token = parse_auth_token(response.getheader("Set-Cookie"))
+        if new_token is not None:
+            self.auth_token = new_token
 
     def on_request(self, method: str, path: str, headers: QueryDict, **kwargs):
         # pylint: disable=unused-argument
@@ -190,7 +191,7 @@ class CredentialsAuthenticator(AbstractAuthenticator):
     def _logout(self):
         self.logger.debug("[LOGOUT]")
         if self.auth_token is not None:
-            self._connection.request(method="DELETE", path="logout")
+            self._connection.request(method="GET", path="logout")
         self.auth_token = None
 
     def _login(self):
diff --git a/src/linkahead/connection/connection.py b/src/linkahead/connection/connection.py
index c95134fed3fd6b031b01b518c6362bf3b371c960..fe99b421ee9d5bc3bc158af6b7f4882232db4d97 100644
--- a/src/linkahead/connection/connection.py
+++ b/src/linkahead/connection/connection.py
@@ -39,7 +39,7 @@ from requests.adapters import HTTPAdapter
 from requests.exceptions import ConnectionError as HTTPConnectionError
 from urllib3.poolmanager import PoolManager
 
-from ..configuration import get_config
+from ..configuration import get_config, config_to_yaml
 from ..exceptions import (ConfigurationError, HTTPClientError,
                           HTTPForbiddenError, HTTPResourceNotFoundError,
                           HTTPServerError, HTTPURITooLongError,
@@ -47,7 +47,7 @@ from ..exceptions import (ConfigurationError, HTTPClientError,
                           LoginFailedError)
 
 try:
-    from ..version import version
+    from ..version import version               # pylint: disable=import-error
 except ModuleNotFoundError:
     version = "uninstalled"
 
@@ -56,11 +56,12 @@ from .interface import CaosDBHTTPResponse, CaosDBServerConnection
 from .utils import make_uri_path, urlencode
 
 from typing import TYPE_CHECKING
+from .authentication.interface import CredentialsAuthenticator
 if TYPE_CHECKING:
     from typing import Optional, Any, Iterator, Union
     from requests.models import Response
-    from ssl import _SSLMethod
-    from .authentication.interface import AbstractAuthenticator, CredentialsAuthenticator
+    from ssl import _SSLMethod              # pylint: disable=no-name-in-module
+    from .authentication.interface import AbstractAuthenticator
 
 
 _LOGGER = logging.getLogger(__name__)
@@ -83,8 +84,10 @@ class _WrappedHTTPResponse(CaosDBHTTPResponse):
         return self.response.status_code
 
     def read(self, size: Optional[int] = None):
+        # FIXME Behaves unexpectedly if `size` is larger than the `size` used in the first call.
+
         if self._stream_consumed is True:
-            raise RuntimeError("Stream is consumed")
+            raise BufferError("Stream is consumed")
 
         if self._buffer is None:
             # the buffer has been drained in the previous call.
@@ -97,14 +100,14 @@ class _WrappedHTTPResponse(CaosDBHTTPResponse):
             return self.response.content
 
         if size is None or size == 0:
-            raise RuntimeError(
-                "size parameter should not be None if the stream is not consumed yet")
+            raise BufferError(
+                "`size` parameter can not be None or zero once reading has started with a non-zero "
+                "value.")
 
         if len(self._buffer) >= size:
             # still enough bytes in the buffer
-            # FIXME: `chunk`` is used before definition
-            result = chunk[:size]
-            self._buffer = chunk[size:]
+            result = self._buffer[:size]
+            self._buffer = self._buffer[size:]
             return result
 
         if self._generator is None:
@@ -116,16 +119,16 @@ class _WrappedHTTPResponse(CaosDBHTTPResponse):
         try:
             # read new data into the buffer
             chunk = self._buffer + next(self._generator)
-            result = chunk[:size]
+            result = chunk[:size]  # FIXME What if `size` exceeds the `iter_content` chunk size?
             if len(result) == 0:
                 self._stream_consumed = True
             self._buffer = chunk[size:]
             return result
         except StopIteration:
             # drain buffer
-            result = self._buffer
+            last_result = self._buffer
             self._buffer = None
-            return result
+            return last_result
 
     def getheader(self, name: str, default=None):
         return self.response.headers[name] if name in self.response.headers else default
@@ -218,7 +221,7 @@ class _DefaultCaosDBServerConnection(CaosDBServerConnection):
                 "Connection failed. Network or server down? " + str(conn_err)
             )
 
-    def configure(self, **config):
+    def configure(self, **config) -> None:
         """configure.
 
         Configure the http connection.
@@ -420,8 +423,10 @@ def configure_connection(**kwargs):
         - "keyring"  Uses the `keyring` library.
         - "auth_token" Uses only a given auth_token.
 
-    timeout : int
+    timeout : int, tuple, or None
         A connection timeout in seconds. (Default: 210)
+        If a tuple is given, its two values are used as connect and read
+        timeouts, respectively; a value of None disables the timeout.
 
     ssl_insecure : bool
         Whether SSL certificate warnings should be ignored. Only use this for
@@ -463,21 +468,29 @@ def configure_connection(**kwargs):
     global_conf = {}
     conf = get_config()
     # Convert config to dict, with preserving types
-    int_opts = ["timeout"]
+    int_opts = []
     bool_opts = ["ssl_insecure"]
+    other_opts = ["timeout"]
 
     if conf.has_section("Connection"):
         global_conf = dict(conf.items("Connection"))
-        # Integer options
 
+        # Integer options
         for opt in int_opts:
             if opt in global_conf:
                 global_conf[opt] = conf.getint("Connection", opt)
-        # Boolean options
 
+        # Boolean options
         for opt in bool_opts:
             if opt in global_conf:
                 global_conf[opt] = conf.getboolean("Connection", opt)
+
+        # Other options, defer parsing to configuration.config_to_yaml:
+        connection_config = config_to_yaml(conf)["Connection"]
+        for opt in other_opts:
+            if opt in global_conf:
+                global_conf[opt] = connection_config[opt]
+
     local_conf = _make_conf(_DEFAULT_CONF, global_conf, kwargs)
 
     connection = _Connection.get_instance()
@@ -551,9 +564,9 @@ class _Connection(object):  # pylint: disable=useless-object-inheritance
 
     __instance = None
 
-    def __init__(self):
+    def __init__(self) -> None:
         self._delegate_connection: Optional[CaosDBServerConnection] = None
-        self._authenticator: Optional[CredentialsAuthenticator] = None
+        self._authenticator: Optional[AbstractAuthenticator] = None
         self.is_configured = False
 
     @classmethod
@@ -563,7 +576,7 @@ class _Connection(object):  # pylint: disable=useless-object-inheritance
 
         return cls.__instance
 
-    def configure(self, **config):
+    def configure(self, **config) -> _Connection:
         self.is_configured = True
 
         if "implementation" not in config:
@@ -571,8 +584,7 @@ class _Connection(object):  # pylint: disable=useless-object-inheritance
                 "Missing CaosDBServerConnection implementation. You did not "
                 "specify an `implementation` for the connection.")
         try:
-            self._delegate_connection: CaosDBServerConnection = config["implementation"](
-            )
+            self._delegate_connection = config["implementation"]()
 
             if not isinstance(self._delegate_connection,
                               CaosDBServerConnection):
@@ -762,6 +774,7 @@ class _Connection(object):  # pylint: disable=useless-object-inheritance
         if self._authenticator is None:
             raise ValueError(
                 "No authenticator set. Please call configure_connection() first.")
+        assert isinstance(self._authenticator, CredentialsAuthenticator)
         if self._authenticator._credentials_provider is None:
             raise ValueError(
                 "No credentials provider set. Please call configure_connection() first.")
diff --git a/src/linkahead/connection/encode.py b/src/linkahead/connection/encode.py
index a76197803c9652e2d0c4e32819ee3e3f97758bfc..0cbb0b69f0a7b50244eb54c8dea7ef43ae713894 100644
--- a/src/linkahead/connection/encode.py
+++ b/src/linkahead/connection/encode.py
@@ -384,7 +384,7 @@ class MultipartYielder(object):
 
     # since python 3
     def __next__(self):
-        return self.next()
+        return self.next()                     # pylint: disable=not-callable
 
     def next(self):
         """generator function to yield multipart/form-data representation of
diff --git a/src/linkahead/connection/mockup.py b/src/linkahead/connection/mockup.py
index 9b69971c0409708f221c402f540fac85ff9c527e..d3bc13bb474a70d48446e8532607c3e11931ff05 100644
--- a/src/linkahead/connection/mockup.py
+++ b/src/linkahead/connection/mockup.py
@@ -75,7 +75,7 @@ class MockUpServerConnection(CaosDBServerConnection):
     just returns predefined responses which mimic the LinkAhead server."""
 
     def __init__(self):
-        self.resources = [self._login]
+        self.resources = [self._login, self._logout]
 
     def _login(self, method, path, headers, body):
         if method == "POST" and path == "login":
@@ -84,6 +84,12 @@ class MockUpServerConnection(CaosDBServerConnection):
                                            "mockup-auth-token"},
                                   body="")
 
+    def _logout(self, method, path, headers, body):
+        if method in ["DELETE", "GET"] and path == "logout":
+            return MockUpResponse(200,
+                                  headers={},
+                                  body="")
+
     def configure(self, **kwargs):
         """This configure method does nothing."""
 
diff --git a/src/linkahead/exceptions.py b/src/linkahead/exceptions.py
index 609d3654ac670a993185ba1faa33db921c44409c..7d4dc0850b811c0d696cc66252aa62541c6d3029 100644
--- a/src/linkahead/exceptions.py
+++ b/src/linkahead/exceptions.py
@@ -94,12 +94,26 @@ class HTTPServerError(LinkAheadException):
     """HTTPServerError represents 5xx HTTP server errors."""
 
     def __init__(self, body):
-        xml = etree.fromstring(body)
-        error = xml.xpath('/Response/Error')[0]
-        msg = error.get("description")
-
-        if error.text is not None:
-            msg = msg + "\n\n" + error.text
+        try:
+            # This only works if the server sends a valid XML
+            # response. Then it can be parsed for more information.
+            xml = etree.fromstring(body)
+            if xml.xpath('/Response/Error'):
+                error = xml.xpath('/Response/Error')[0]
+                msg = error.get("description") if error.get("description") is not None else ""
+
+                if error.text is not None:
+                    if msg:
+                        msg = msg + "\n\n" + error.text
+                    else:
+                        msg = error.text
+            else:
+                # Valid XML, but no error information
+                msg = body
+        except etree.XMLSyntaxError:
+            # Handling of incomplete responses, e.g., due to timeouts,
+            # c.f. https://gitlab.com/linkahead/linkahead-pylib/-/issues/87.
+            msg = body
         LinkAheadException.__init__(self, msg)
 
 
diff --git a/src/linkahead/high_level_api.py b/src/linkahead/high_level_api.py
index 18d219c732672d16d0ab43e562cfe73d682614fe..9aa59fb9187ff47e71c412568af50e1031c42fb7 100644
--- a/src/linkahead/high_level_api.py
+++ b/src/linkahead/high_level_api.py
@@ -26,11 +26,12 @@
 # type: ignore
 """
 A high level API for accessing LinkAhead entities from within python.
+This module is experimental, and may be changed or removed in the future.
 
 This is refactored from apiutils.
 """
 
-import warnings
+import logging
 from dataclasses import dataclass, fields
 from datetime import datetime
 from typing import Any, Dict, List, Optional, Union
@@ -44,7 +45,10 @@ from .common.datatype import (BOOLEAN, DATETIME, DOUBLE, FILE, INTEGER,
                               REFERENCE, TEXT, get_list_datatype,
                               is_list_datatype, is_reference)
 
-warnings.warn("""EXPERIMENTAL! The high_level_api module is experimental and may be changed or
+logger = logging.getLogger(__name__)
+
+
+logger.warning("""EXPERIMENTAL! The high_level_api module is experimental and may be changed or
 removed in the future. Its purpose is to give an impression on how the Python client user interface
 might be changed.""")
 
diff --git a/src/linkahead/schema-pycaosdb-ini.yml b/src/linkahead/schema-pycaosdb-ini.yml
index 89ce98570738fdd29dba81de25a2c022c1581467..ae46b905c62d2ab168229d92ff138937279c7aed 100644
--- a/src/linkahead/schema-pycaosdb-ini.yml
+++ b/src/linkahead/schema-pycaosdb-ini.yml
@@ -67,7 +67,13 @@ schema-pycaosdb-ini:
           description: This option is used internally and for testing. Do not override.
           examples: [_DefaultCaosDBServerConnection]
         timeout:
-          type: integer
+          oneOf:
+            - type: [integer, "null"]
+            - type: array
+              items:
+                type: [integer, "null"]
+              minItems: 2
+              maxItems: 2
       allOf:
         - if:
             properties:
diff --git a/src/linkahead/utils/git_utils.py b/src/linkahead/utils/git_utils.py
index 7a58272a3bef1930f75a1e08364349388e2bb89f..4824d619bfc77925add0c383f72360a644dd7833 100644
--- a/src/linkahead/utils/git_utils.py
+++ b/src/linkahead/utils/git_utils.py
@@ -36,9 +36,9 @@ logger = logging.getLogger(__name__)
 
 def get_origin_url_in(folder: str):
     """return the Fetch URL of the git repository in the given folder."""
-    with tempfile.NamedTemporaryFile(delete=False, mode="w") as t:
-        call(["git", "remote", "show", "origin"], stdout=t, cwd=folder)
-    with open(t.name, "r") as t:
+    with tempfile.NamedTemporaryFile(delete=False, mode="w", encoding="utf8") as tempf:
+        call(["git", "remote", "show", "origin"], stdout=tempf, cwd=folder)
+    with open(tempf.name, "r", encoding="utf8") as t:
         urlString = "Fetch URL:"
 
         for line in t.readlines():
@@ -63,9 +63,9 @@ def get_branch_in(folder: str):
     The command "git branch" is called in the given folder and the
     output is returned
     """
-    with tempfile.NamedTemporaryFile(delete=False, mode="w") as t:
-        call(["git", "rev-parse", "--abbrev-ref", "HEAD"], stdout=t, cwd=folder)
-    with open(t.name, "r") as t:
+    with tempfile.NamedTemporaryFile(delete=False, mode="w") as tempf:
+        call(["git", "rev-parse", "--abbrev-ref", "HEAD"], stdout=tempf, cwd=folder)
+    with open(tempf.name, "r") as t:
         return t.readline().strip()
 
 
@@ -76,7 +76,7 @@ def get_commit_in(folder: str):
     and the output is returned
     """
 
-    with tempfile.NamedTemporaryFile(delete=False, mode="w") as t:
-        call(["git", "log", "-1", "--format=%h"], stdout=t, cwd=folder)
-    with open(t.name, "r") as t:
+    with tempfile.NamedTemporaryFile(delete=False, mode="w") as tempf:
+        call(["git", "log", "-1", "--format=%h"], stdout=tempf, cwd=folder)
+    with open(tempf.name, "r") as t:
         return t.readline().strip()
diff --git a/src/linkahead/utils/plantuml.py b/src/linkahead/utils/plantuml.py
index 19594d6e856e740fe2c58c5128eead31c37485ce..59e3c34dd04c2425aef46b6d9e2411f75b747aca 100644
--- a/src/linkahead/utils/plantuml.py
+++ b/src/linkahead/utils/plantuml.py
@@ -130,9 +130,9 @@ def recordtypes_to_plantuml_string(iterable,
 
     classes = [el for el in iterable
                if isinstance(el, db.RecordType)]
-    dependencies = {}
-    inheritances = {}
-    properties = [p for p in iterable if isinstance(p, db.Property)]
+    dependencies: dict = {}
+    inheritances: dict = {}
+    properties: list = [p for p in iterable if isinstance(p, db.Property)]
     grouped = [g for g in iterable if isinstance(g, Grouped)]
 
     def _add_properties(c, importance=None):
@@ -272,7 +272,8 @@ package \"The property P references an instance of D\" <<Rectangle>> {
     return result
 
 
-def retrieve_substructure(start_record_types, depth, result_id_set=None, result_container=None, cleanup=True):
+def retrieve_substructure(start_record_types, depth, result_id_set=None, result_container=None,
+                          cleanup=True):
     """Recursively retrieves LinkAhead record types and properties, starting
     from given initial types up to a specific depth.
 
diff --git a/src/linkahead/utils/register_tests.py b/src/linkahead/utils/register_tests.py
index 6909544fed5a6f80572f60ba102c72b53568d897..66fd4553346075fc77aa7b1f6003d26d9967c223 100644
--- a/src/linkahead/utils/register_tests.py
+++ b/src/linkahead/utils/register_tests.py
@@ -18,44 +18,62 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-import linkahead as db
-from linkahead import administration as admin
-
-"""
-This module implements a registration procedure for integration tests which
+"""This module implements a registration procedure for integration tests which
 need a running LinkAhead instance.
 
-It ensures that tests do not accidentally overwrite data in real LinkAhead
-instances, as it checks whether the running LinkAhead instance is actually the
-correct one, that
-should be used for these tests.
-
-The test files have to define a global variable TEST_KEY which must be unique
-for each test using
+It ensures that tests do not accidentally overwrite data in real
+LinkAhead instances: it checks whether the running LinkAhead
+instance is actually the one that should be used for these
+tests.
 
-set_test_key("ABCDE")
+The test files have to define a global variable ``TEST_KEY`` which
+must be unique for each test; set it using
+:py:func:`~linkahead.utils.register_tests.set_test_key`.
 
 The test procedure (invoked by pytest) checks whether a registration
 information is stored in one of the server properties or otherwise
-- offers to register this test in the currently running database ONLY if this
-  is empty.
+
+- offers to register this test in the currently running database ONLY if this is
+  empty.
 - fails otherwise with a RuntimeError
 
-NOTE: you probably need to use pytest with the -s option to be able to
-      register the test interactively. Otherwise, the server property has to be
-      set before server start-up in the server.conf of the LinkAhead server.
+.. note::
+
+    You probably need to use pytest with the ``-s`` option to be able to
+    register the test interactively. Otherwise, the server property
+    has to be set before server start-up in the server.conf of the
+    LinkAhead server.
 
 This module is intended to be used with pytest.
 
-There is a pytest fixture "clear_database" that performs the above mentioned
-checks and clears the database in case of success.
+There is a pytest fixture
+:py:func:`~linkahead.utils.register_tests.clear_database` that
+performs the above-mentioned checks and clears the database in case of
+success.
+
 """
 
+import linkahead as db
+from linkahead import administration as admin
+
 TEST_KEY = None
 
 
-def set_test_key(KEY):
+def set_test_key(KEY: str):
+    """Set the global ``TEST_KEY`` variable to `KEY`. Afterwards, if
+    `KEY` matches the ``_CAOSDB_INTEGRATION_TEST_SUITE_KEY`` server
+    environment variable, mehtods like :py:meth:`clear_database` can
+    be used. Call this function in the beginning of your test file.
+
+    Parameters
+    ----------
+    KEY : str
+        key with which the test using this function is registered and
+        which is checked against the
+        ``_CAOSDB_INTEGRATION_TEST_SUITE_KEY`` server environment
+        variable.
+
+    """
     global TEST_KEY
     TEST_KEY = KEY
 
@@ -122,10 +140,14 @@ try:
 
     @pytest.fixture
     def clear_database():
-        """Remove Records, RecordTypes, Properties, and Files ONLY IF the LinkAhead
-        server the current connection points to was registered with the appropriate key.
+        """Remove Records, RecordTypes, Properties, and Files ONLY IF
+        the LinkAhead server the current connection points to was
+        registered with the appropriate key using
+        :py:func:`set_test_key`.
+
+        PyTestInfo Records and the corresponding RecordType and
+        Property are preserved.
 
-        PyTestInfo Records and the corresponding RecordType and Property are preserved.
         """
         _assure_test_is_registered()
         yield _clear_database()  # called before the test function
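
For orientation, a hypothetical integration-test module using this registration machinery might
look as follows (the key "ABCDE" is the docstring's example; test and record names are made up;
importing the fixture into the module makes it available to pytest without a conftest entry):

    import linkahead as db
    from linkahead.utils.register_tests import clear_database, set_test_key  # noqa: F401

    set_test_key("ABCDE")  # unique key, checked against _CAOSDB_INTEGRATION_TEST_SUITE_KEY


    def test_insert_record(clear_database):
        # At this point the server is verified to be the registered test instance and is empty.
        rec = db.Record(name="TestRecord").add_parent(name="TestRT")
        ...
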
diff --git a/unittests/test_apiutils.py b/unittests/test_apiutils.py
index fdd5adda065a563b15008f1b840539c110921b65..6667089abc2d16e59bd97d16f7d0fe75d07afe1b 100644
--- a/unittests/test_apiutils.py
+++ b/unittests/test_apiutils.py
@@ -991,3 +991,31 @@ def test_describe_diff():
 
     assert "first" not in diffout
     assert "second" not in diffout
+
+
+def test_diff_without_names():
+    """Test compare_entities in case of properties and parents with
+    ids and without names
+    (cf. https://gitlab.com/linkahead/linkahead-pylib/-/issues/119).
+
+    """
+
+    r1 = db.Record(name="Test").add_parent(name="TestType")
+    r2 = db.Record(name="Test").add_parent(name="TestType")
+    r2.add_property(id=123, value="Test")
+
+    diff1, diff2 = compare_entities(r1, r2)
+    assert len(diff1["properties"]) == 0
+    assert len(diff2["properties"]) == 1
+    assert 123 in diff2["properties"]
+    assert None not in diff2["properties"]
+
+    r3 = db.Record().add_parent(id=101)
+    r4 = db.Record().add_parent(id=102)
+    diff3, diff4 = compare_entities(r3, r4)
+    assert len(diff3["parents"]) == 1
+    assert 101 in diff3["parents"]
+    assert None not in diff3["parents"]
+    assert len(diff4["parents"]) == 1
+    assert 102 in diff4["parents"]
+    assert None not in diff4["parents"]
diff --git a/unittests/test_authentication_auth_token.py b/unittests/test_authentication_auth_token.py
index 3142f1f9f54230cb19666eeb8ff5809a906f9d49..4eb17bcc3892a0d0cad0f2c86289c2e8c625d426 100644
--- a/unittests/test_authentication_auth_token.py
+++ b/unittests/test_authentication_auth_token.py
@@ -96,6 +96,6 @@ def test_logout_calls_delete():
                              auth_token="[request token]",
                              implementation=MockUpServerConnection)
 
-    c._delegate_connection.resources.append(logout_resource)
+    c._delegate_connection.resources.insert(1, logout_resource)
     c._logout()
     mock.method.assert_called_once()
diff --git a/unittests/test_configs/pylinkahead-timeout1.ini b/unittests/test_configs/pylinkahead-timeout1.ini
new file mode 100644
index 0000000000000000000000000000000000000000..d9f894bfeba4f98ed30d96d8c29e057b5a1e643a
--- /dev/null
+++ b/unittests/test_configs/pylinkahead-timeout1.ini
@@ -0,0 +1,4 @@
+[Connection]
+url=https://localhost:10443/
+password_method = unauthenticated
+timeout = None
diff --git a/unittests/test_configs/pylinkahead-timeout2.ini b/unittests/test_configs/pylinkahead-timeout2.ini
new file mode 100644
index 0000000000000000000000000000000000000000..b3d3796f82148459efb8e19344fe11af9e7934ec
--- /dev/null
+++ b/unittests/test_configs/pylinkahead-timeout2.ini
@@ -0,0 +1,4 @@
+[Connection]
+url=https://localhost:10443/
+password_method = unauthenticated
+timeout = (1,20)
diff --git a/unittests/test_configuration.py b/unittests/test_configuration.py
index 95bc906c6c044c51548aa864326cc93f29a6042a..772e872c08e0a7c4aae3feffdb58244f6ad0c849 100644
--- a/unittests/test_configuration.py
+++ b/unittests/test_configuration.py
@@ -24,6 +24,7 @@
 
 from os import environ, getcwd, remove
 from os.path import expanduser, isfile, join
+from pathlib import Path
 
 import linkahead as db
 import pytest
@@ -66,3 +67,18 @@ def test_config_ini_via_envvar(temp_ini_files):
     assert expanduser("~/.pylinkahead.ini") in db.configuration._read_config_files()
     # test configuration file in cwd
     assert join(getcwd(), "pylinkahead.ini") in db.configuration._read_config_files()
+
+
+def test_config_timeout_option():
+    expected_results = [None, (1, 20)]
+    # Iterate through timeout test configs
+    test_configs = Path(__file__).parent/'test_configs'
+    for test_config in test_configs.rglob('pylinkahead-timeout*.ini'):
+        # Test that test configs can be parsed
+        db.configure(str(test_config))
+        dct = db.configuration.config_to_yaml(db.get_config())
+        # Test that resulting dict has correct content for timeout
+        assert 'Connection' in dct
+        assert 'timeout' in dct['Connection']
+        assert dct['Connection']['timeout'] in expected_results
+        expected_results.remove(dct['Connection']['timeout'])
diff --git a/unittests/test_connection.py b/unittests/test_connection.py
index a3a1eff705c64f59baec33088906bdd9a4daa14d..5d22efa46e3a6c10452085d735d1bd6f056a81fc 100644
--- a/unittests/test_connection.py
+++ b/unittests/test_connection.py
@@ -25,14 +25,18 @@
 # pylint: disable=missing-docstring
 from __future__ import print_function, unicode_literals
 
+import io
 import re
 from builtins import bytes, str  # pylint: disable=redefined-builtin
 
+import requests
+
 from linkahead import execute_query
 from linkahead.configuration import _reset_config, get_config
 from linkahead.connection.authentication.interface import CredentialsAuthenticator
 from linkahead.connection.connection import (CaosDBServerConnection,
                                              _DefaultCaosDBServerConnection,
+                                             _WrappedHTTPResponse,
                                              configure_connection)
 from linkahead.connection.mockup import (MockUpResponse, MockUpServerConnection,
                                          _request_log_message)
@@ -216,9 +220,9 @@ def test_init_connection():
 def test_resources_list():
     connection = test_init_connection()
     assert hasattr(connection, "resources")
-    assert len(connection.resources) == 1
-    connection.resources.append(lambda **kwargs: test_init_response())
     assert len(connection.resources) == 2
+    connection.resources.append(lambda **kwargs: test_init_response())
+    assert len(connection.resources) == 3
 
     return connection
 
@@ -324,3 +328,51 @@ def test_auth_token_connection():
                                 "auth_token authenticator cannot log in "
                                 "again. You must provide a new authentication "
                                 "token.")
+
+
+def test_buffer_read():
+    """Test the buffering in _WrappedHTTPResponse.read()"""
+
+    class MockResponse(requests.Response):
+        def __init__(self, content: bytes):
+            """A mock response
+
+            Parameters
+            ----------
+            content : bytes
+              The fake content.
+            """
+            super().__init__()
+            self._content = content
+            bio = io.BytesIO(content)
+            self.raw = bio
+
+    expected = b"This response."
+    MockResponse(expected)
+
+    #############################
+    # Check for some exceptions #
+    #############################
+    resp = _WrappedHTTPResponse(response=MockResponse(expected))
+    with raises(BufferError) as rte:
+        resp.read(4)
+        resp.read()
+    assert "`size` parameter can not be None" in str(rte.value)
+
+    resp = _WrappedHTTPResponse(response=MockResponse(expected))
+    with raises(BufferError) as rte:
+        resp.read(4)
+        resp.read(0)
+    assert "`size` parameter can not be None" in str(rte.value)
+
+    print("---")
+    resp = _WrappedHTTPResponse(response=MockResponse(expected))
+    result = (
+        resp.read(4)
+        + resp.read(2)
+        + resp.read(2)  # This line failed before.
+        + resp.read(4)  # Reading the rest in two chunks, because of current limitations in read().
+        + resp.read(2)
+    )
+
+    assert result == expected
diff --git a/unittests/test_container.py b/unittests/test_container.py
index c3a60140d43383c81f03c38c9dd5cc7779bc77ba..9df40ffbbdd62b93453058993dbe64bcf3028fb5 100644
--- a/unittests/test_container.py
+++ b/unittests/test_container.py
@@ -70,7 +70,8 @@ def test_get_property_values():
                                           )
     assert len(table) == 2
     house_row = table[0]
-    assert house_row == (house.name, 40.2, "ft", window.id, None, None, None, 20.5, 20.5, "m", owner.name)
+    assert house_row == (house.name, 40.2, "ft", window.id, None, None, None, 20.5, 20.5, "m",
+                         owner.name)
 
     owner_row = table[1]
     assert owner_row == (owner.name, None, None, None, None, None, None, None, None, None, None)
@@ -199,3 +200,15 @@ def test_container_slicing():
 
     with pytest.raises(TypeError):
         cont[[0, 2, 3]]
+
+
+def test_container_filter():
+    # This is a very rudimentary test since filter_by_identity is based on
+    # _filter_entity_list_by_identity, which is tested separately.
+    cont = db.Container()
+    cont.extend([db.Record(name=f"TestRec{ii+1}") for ii in range(5)])
+
+    recs = cont.filter_by_identity(name="TestRec2")
+    assert len(recs) == 1
+    assert recs[0].name == "TestRec2"
diff --git a/unittests/test_entity.py b/unittests/test_entity.py
index 2127ce028f4de55b8ef0ca704c1e69959c24ba82..f2164d9680471e0ed52b47943f0108ef7e4ce60f 100644
--- a/unittests/test_entity.py
+++ b/unittests/test_entity.py
@@ -29,7 +29,10 @@ import unittest
 import linkahead
 from linkahead import (INTEGER, Entity, Parent, Property, Record, RecordType,
                        configure_connection)
-from linkahead.common.models import SPECIAL_ATTRIBUTES
+import warnings
+from linkahead.common.models import (SPECIAL_ATTRIBUTES, get_id_from_versionid,
+                                     value_matches_versionid)
+from linkahead.common.versioning import Version
 from linkahead.connection.mockup import MockUpServerConnection
 from lxml import etree
 from pytest import raises
@@ -161,7 +164,7 @@ def test_property_list():
     pl.append(p3)
 
 
-def test_filter():
+def test_filter_by_identity():
     rt1 = RecordType(id=100)
     rt2 = RecordType(id=101, name="RT")
     rt3 = RecordType(name="")
@@ -184,7 +187,7 @@ def test_filter():
         for coll in [entity.properties, entity.parents]:
             for ent in test_ents:
                 assert ent not in coll
-                assert ent not in coll.filter(ent)
+                assert ent not in coll.filter_by_identity(ent)
 
         # Checks with each type
         t, t_props, t_pars = entity, entity.properties, entity.parents
@@ -194,23 +197,23 @@ def test_filter():
         tp1 = t.properties[-1]
         t.add_property(p3)
         tp3 = t.properties[-1]
-        assert len(t_props.filter(pid=100)) == 1
-        assert tp1 in t_props.filter(pid=100)
-        assert len(t_props.filter(pid="100")) == 1
-        assert tp1 in t_props.filter(pid="100")
-        assert len(t_props.filter(pid=101, name="RT")) == 1
-        assert tp3 in t_props.filter(pid=101, name="RT")
+        assert len(t_props.filter_by_identity(pid=100)) == 1
+        assert tp1 in t_props.filter_by_identity(pid=100)
+        assert len(t_props.filter_by_identity(pid="100")) == 1
+        assert tp1 in t_props.filter_by_identity(pid="100")
+        assert len(t_props.filter_by_identity(pid=101, name="RT")) == 1
+        assert tp3 in t_props.filter_by_identity(pid=101, name="RT")
         for entity in [rt1, p2, r1, r2]:
-            assert entity not in t_props.filter(pid=100)
-            assert tp1 in t_props.filter(entity)
+            assert entity not in t_props.filter_by_identity(pid=100)
+            assert tp1 in t_props.filter_by_identity(entity)
         # Check that direct addition (not wrapped) works
         t_props.append(p2)
         tp2 = t_props[-1]
-        assert tp2 in t_props.filter(pid=100)
-        assert tp2 not in t_props.filter(pid=101, name="RT")
+        assert tp2 in t_props.filter_by_identity(pid=100)
+        assert tp2 not in t_props.filter_by_identity(pid=101, name="RT")
         for entity in [rt1, r1, r2]:
-            assert entity not in t_props.filter(pid=100)
-            assert tp2 in t_props.filter(entity)
+            assert entity not in t_props.filter_by_identity(pid=100)
+            assert tp2 in t_props.filter_by_identity(entity)
 
         # Parents
         # Filtering with both name and id
@@ -218,67 +221,99 @@ def test_filter():
         tr3 = t.parents[-1]
         t.add_parent(r5)
         tr5 = t.parents[-1]
-        assert tr3 in t_pars.filter(pid=101)
-        assert tr5 not in t_pars.filter(pid=101)
-        assert tr3 not in t_pars.filter(name="R")
-        assert tr5 in t_pars.filter(name="R")
-        assert tr3 in t_pars.filter(pid=101, name="R")
-        assert tr5 not in t_pars.filter(pid=101, name="R")
-        assert tr3 not in t_pars.filter(pid=104, name="RT")
-        assert tr5 in t_pars.filter(pid=104, name="RT")
-        assert tr3 not in t_pars.filter(pid=105, name="T")
-        assert tr5 not in t_pars.filter(pid=105, name="T")
+        assert tr3 in t_pars.filter_by_identity(pid=101)
+        assert tr5 not in t_pars.filter_by_identity(pid=101)
+        assert tr3 not in t_pars.filter_by_identity(name="R")
+        assert tr5 in t_pars.filter_by_identity(name="R")
+        assert tr3 in t_pars.filter_by_identity(pid=101, name="R")
+        assert tr5 not in t_pars.filter_by_identity(pid=101, name="R")
+        assert tr3 not in t_pars.filter_by_identity(pid=104, name="RT")
+        assert tr5 in t_pars.filter_by_identity(pid=104, name="RT")
+        assert tr3 not in t_pars.filter_by_identity(pid=105, name="T")
+        assert tr5 not in t_pars.filter_by_identity(pid=105, name="T")
         # Works also without id / name and with duplicate parents
         for ent in test_ents:
             t.add_parent(ent)
         for ent in t_pars:
-            assert ent in t_pars.filter(ent)
+            assert ent in t_pars.filter_by_identity(ent)
 
     # Grid-Based
     r7 = Record()
     r7.add_property(Property()).add_property(name="A").add_property(name="B")
     r7.add_property(id=27).add_property(id=27, name="A").add_property(id=27, name="B")
     r7.add_property(id=43).add_property(id=43, name="A").add_property(id=43, name="B")
-    assert len(r7.properties.filter(pid=27)) == 3
-    assert len(r7.properties.filter(pid=43)) == 3
-    assert len(r7.properties.filter(pid=43, conjunction=True)) == 3
-    assert len(r7.properties.filter(name="A")) == 3
-    assert len(r7.properties.filter(name="B")) == 3
-    assert len(r7.properties.filter(name="B", conjunction=True)) == 3
-    assert len(r7.properties.filter(pid=1, name="A")) == 1
-    assert len(r7.properties.filter(pid=1, name="A", conjunction=True)) == 0
-    assert len(r7.properties.filter(pid=27, name="B")) == 4
-    assert len(r7.properties.filter(pid=27, name="B", conjunction=True)) == 1
-    assert len(r7.properties.filter(pid=27, name="C")) == 3
-    assert len(r7.properties.filter(pid=27, name="C", conjunction=True)) == 0
+    assert len(r7.properties.filter_by_identity(pid=27)) == 3
+    assert len(r7.properties.filter_by_identity(pid=43)) == 3
+    assert len(r7.properties.filter_by_identity(pid=43, conjunction=True)) == 3
+    assert len(r7.properties.filter_by_identity(name="A")) == 3
+    assert len(r7.properties.filter_by_identity(name="B")) == 3
+    assert len(r7.properties.filter_by_identity(name="B", conjunction=True)) == 3
+    assert len(r7.properties.filter_by_identity(pid=1, name="A")) == 1
+    assert len(r7.properties.filter_by_identity(pid=1, name="A", conjunction=True)) == 0
+    assert len(r7.properties.filter_by_identity(pid=27, name="B")) == 4
+    assert len(r7.properties.filter_by_identity(pid=27, name="B", conjunction=True)) == 1
+    assert len(r7.properties.filter_by_identity(pid=27, name="C")) == 3
+    assert len(r7.properties.filter_by_identity(pid=27, name="C", conjunction=True)) == 0
     # Entity based filtering behaves the same
-    assert (r7.properties.filter(pid=27) ==
-            r7.properties.filter(Property(id=27)))
-    assert (r7.properties.filter(pid=43, conjunction=True) ==
-            r7.properties.filter(Property(id=43), conjunction=True))
-    assert (r7.properties.filter(name="A") ==
-            r7.properties.filter(Property(name="A")))
-    assert (r7.properties.filter(name="B") ==
-            r7.properties.filter(Property(name="B")))
-    assert (r7.properties.filter(name="B", conjunction=True) ==
-            r7.properties.filter(Property(name="B"), conjunction=True))
-    assert (r7.properties.filter(pid=1, name="A") ==
-            r7.properties.filter(Property(id=1, name="A")))
-    assert (r7.properties.filter(pid=1, name="A", conjunction=True) ==
-            r7.properties.filter(Property(id=1, name="A"), conjunction=True))
-    assert (r7.properties.filter(pid=27, name="B") ==
-            r7.properties.filter(Property(id=27, name="B")))
-    assert (r7.properties.filter(pid=27, name="B", conjunction=True) ==
-            r7.properties.filter(Property(id=27, name="B"), conjunction=True))
-    assert (r7.properties.filter(pid=27, name="C") ==
-            r7.properties.filter(Property(id=27, name="C")))
-    assert (r7.properties.filter(pid=27, name="C", conjunction=True) ==
-            r7.properties.filter(Property(id=27, name="C"), conjunction=True))
+    assert (r7.properties.filter_by_identity(pid=27) ==
+            r7.properties.filter_by_identity(Property(id=27)))
+    assert (r7.properties.filter_by_identity(pid=43, conjunction=True) ==
+            r7.properties.filter_by_identity(Property(id=43), conjunction=True))
+    assert (r7.properties.filter_by_identity(name="A") ==
+            r7.properties.filter_by_identity(Property(name="A")))
+    assert (r7.properties.filter_by_identity(name="B") ==
+            r7.properties.filter_by_identity(Property(name="B")))
+    assert (r7.properties.filter_by_identity(name="B", conjunction=True) ==
+            r7.properties.filter_by_identity(Property(name="B"), conjunction=True))
+    assert (r7.properties.filter_by_identity(pid=1, name="A") ==
+            r7.properties.filter_by_identity(Property(id=1, name="A")))
+    assert (r7.properties.filter_by_identity(pid=1, name="A", conjunction=True) ==
+            r7.properties.filter_by_identity(Property(id=1, name="A"), conjunction=True))
+    assert (r7.properties.filter_by_identity(pid=27, name="B") ==
+            r7.properties.filter_by_identity(Property(id=27, name="B")))
+    assert (r7.properties.filter_by_identity(pid=27, name="B", conjunction=True) ==
+            r7.properties.filter_by_identity(Property(id=27, name="B"), conjunction=True))
+    assert (r7.properties.filter_by_identity(pid=27, name="C") ==
+            r7.properties.filter_by_identity(Property(id=27, name="C")))
+    assert (r7.properties.filter_by_identity(pid=27, name="C", conjunction=True) ==
+            r7.properties.filter_by_identity(Property(id=27, name="C"), conjunction=True))
     # Name only matching and name overwrite
     r8 = Record().add_property(name="A").add_property(name="B").add_property(name="B")
     r8.add_property(Property(name="A"), name="B")
     r8.add_property(Property(name="A", id=12), name="C")
-    assert len(r8.properties.filter(name="A")) == 1
-    assert len(r8.properties.filter(name="B")) == 3
-    assert len(r8.properties.filter(name="C")) == 1
-    assert len(r8.properties.filter(pid=12)) == 1
+    assert len(r8.properties.filter_by_identity(name="A")) == 1
+    assert len(r8.properties.filter_by_identity(name="B")) == 3
+    assert len(r8.properties.filter_by_identity(name="C")) == 1
+    assert len(r8.properties.filter_by_identity(pid=12)) == 1
+
+    with warnings.catch_warnings(record=True) as w:
+        # Cause all warnings to always be triggered.
+        warnings.simplefilter("always")
+
+        r7.properties.filter(pid=34)
+        assert issubclass(w[-1].category, DeprecationWarning)
+        assert "This function was renamed" in str(w[-1].message)
+
+        t.parents.filter(pid=234)
+        assert issubclass(w[-1].category, DeprecationWarning)
+        assert "This function was renamed" in str(w[-1].message)
+
+
+def test_value_matches_versionid():
+    assert value_matches_versionid(234) is False, "integer is no version id"
+    assert value_matches_versionid("234") is False, ("string that only contains an integer is no "
+                                                     "version id")
+    assert value_matches_versionid("234@bfe1a42cb37aae8ac625a757715d38814c274158") is True, (
+        "integer is no version id") is True
+    with raises(ValueError):
+        value_matches_versionid(234.0)
+
+
+def test_get_id_from_versionid():
+    assert get_id_from_versionid("234@bfe1a42cb37aae8ac625a757715d38814c274158") == "234"
+
+
+def test_get_versionid():
+    e = Entity(id=234)
+    e.version = Version(id="bfe1a42cb37aae8ac625a757715d38814c274158")
+    assert e.get_versionid() == "234@bfe1a42cb37aae8ac625a757715d38814c274158"
diff --git a/unittests/test_error_handling.py b/unittests/test_error_handling.py
index 3f5241466e9a8f810b581cbb587e17ccf8f123ee..64f743c85e9df554e7428cf7d8477e8c823a9758 100644
--- a/unittests/test_error_handling.py
+++ b/unittests/test_error_handling.py
@@ -30,7 +30,7 @@ import linkahead as db
 from linkahead.common.models import raise_errors
 from linkahead.exceptions import (AuthorizationError,
                                   EntityDoesNotExistError, EntityError,
-                                  EntityHasNoDatatypeError,
+                                  EntityHasNoDatatypeError, HTTPServerError,
                                   TransactionError, UniqueNamesError,
                                   UnqualifiedParentsError,
                                   UnqualifiedPropertiesError)
@@ -315,3 +315,26 @@ def test_container_with_faulty_elements():
             # record raises both of them
             assert (isinstance(err, UnqualifiedParentsError) or
                     isinstance(err, UnqualifiedPropertiesError))
+
+
+def test_incomplete_server_error_response():
+    """The reason behind https://gitlab.com/linkahead/linkahead-pylib/-/issues/87."""
+    # Case 1: Response is no XML at all
+    err = HTTPServerError("Bla")
+    assert str(err) == "Bla"
+
+    # Case 2: Response is an incomplete XML, e.g. due to very unlucky timeout
+    err = HTTPServerError("<incomplete>XML</inc")
+    assert str(err) == "<incomplete>XML</inc"
+
+    # Case 3: Response is complete XML but doesn't have response and/or error information
+    err = HTTPServerError("<complete>XML</complete>")
+    assert str(err) == "<complete>XML</complete>"
+
+    # Case 4: Response is an XML response but the error is lacking a description
+    err = HTTPServerError("<Response><Error>complete error</Error></Response>")
+    assert str(err) == "complete error"
+
+    # Case 5: Healthy error Response
+    err = HTTPServerError("<Response><Error description='Error'>complete error</Error></Response>")
+    assert str(err) == "Error\n\ncomplete error"
diff --git a/unittests/test_issues.py b/unittests/test_issues.py
index e24afbe8b7be8d9a87d85819eccd3a4bf0d453e8..3b0117b28c1300ea1eb0919fce02e3881c2ab025 100644
--- a/unittests/test_issues.py
+++ b/unittests/test_issues.py
@@ -26,6 +26,7 @@ import linkahead as db
 
 from datetime import date, datetime
 from pytest import raises
+from linkahead.common.utils import xml2str
 
 
 def test_issue_100():
@@ -90,3 +91,40 @@ def test_issue_128():
     assert prop_list.value == [today, today]
     prop_list.value = [now, now]
     assert prop_list.value == [now, now]
+
+
+def test_issue_73():
+    """
+    Test to_xml infinite recursion handling with cross- and self-references.
+    https://gitlab.com/linkahead/linkahead-pylib/-/issues/73
+    """
+    # Cross-reference in the property values
+    rt = db.RecordType(name="RT")
+    recA = db.Record().add_parent(rt)
+    recB = db.Record().add_parent(rt)
+    recA.add_property(name="RT", value=recB)
+    recB.add_property(name="RT", value=recA)
+    xml_str = xml2str(recB.to_xml())
+    assert "<Parent name=\"RT" in xml_str
+    assert "<Property name=\"RT" in xml_str
+    assert "Recursive reference" in xml_str
+    assert len(xml_str) < 500
+
+    # Cross-reference in the properties themselves
+    prop1 = db.Property(name="Prop1")
+    prop2 = db.Property(name="Prop2")
+    prop1.add_property(prop2)
+    prop2.add_property(prop1)
+    xml_str = xml2str(prop2.to_xml())
+    assert "<Property name=\"Prop1" in xml_str
+    assert "<Property name=\"Prop2" in xml_str
+    assert "Recursive reference" in xml_str
+    assert len(xml_str) < 500
+
+    # Self-reference in the datatype
+    prop = db.Property()
+    prop.datatype = prop
+    xml_str = xml2str(prop.to_xml())
+    assert "datatype=" in xml_str
+    assert "Recursive reference" in xml_str
+    assert len(xml_str) < 500