diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index db600343569930a436a593a8ab5d511a35bc7aca..a773c6776b5224dc482bc104fe490e98b9e19eb5 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -101,13 +101,26 @@ unittest_py3.12:
   script: *python_test_script
 
 unittest_py3.13:
-  allow_failure: true
   tags: [ docker ]
   stage: test
   needs: [ ]
   image: python:3.13
   script: *python_test_script
 
+unittest_py3.14:
+  allow_failure: true   # remove on release
+  tags: [ docker ]
+  stage: test
+  needs: [ ]
+  image: python:3.14-rc
+  script:               # replace by '*python_test_script' on release
+    # Install Rust/cargo manually, source its env, and let PyO3 accept Python 3.14 as interpreter
+    - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
+    - . "$HOME/.cargo/env"
+    - export PYO3_USE_ABI3_FORWARD_COMPATIBILITY=1
+    # Continue normally
+    - *python_test_script
+
 # Trigger building of server image and integration tests
 trigger_build:
   stage: deploy
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f56bc3abe12fc0dde3077e74b94472a366727074..790b8abadd3c13be18c3fd4f022f9e0db0a446ff 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,23 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.18.0] - 2025-05-27 ##
+
+### Added ###
+
+- Convenience functions `value_matches_versionid` and `get_id_from_versionid`, and the `Entity.get_versionid` method.
+- High level API: Parameter `plain_json` for serialization to output plain JSON.
+- High level API: Parameter `resolve_references` to resolve references when converting to a Python object.
+
+### Removed ###
+
+- `DropOffBox` class and the related `pickup` and `from_location` parameters for file uploading.
+
+### Documentation ###
+
+- [#78](https://gitlab.com/linkahead/linkahead-pylib/-/issues/78) Fix
+  and extend test-registration docstrings.
+
 ## [0.17.0] - 2025-01-14 ##
 
 ### Added ###
diff --git a/CITATION.cff b/CITATION.cff
index bcecc2fdd962f4c581a2d53d5c1a324fb643a4a3..e685ff0c010755a512d4be6ab78854f594950273 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -20,6 +20,6 @@ authors:
     given-names: Stefan
     orcid: https://orcid.org/0000-0001-7214-8125
 title: CaosDB - Pylib
-version: 0.17.0
+version: 0.18.0
 doi: 10.3390/data4020083
-date-released: 2025-01-14
+date-released: 2025-05-27
diff --git a/Makefile b/Makefile
index 21ea40ac8a6eb34032aba75c089e278fa354a6f5..7490c5d586c18d9da361f8940a39550779fb5938 100644
--- a/Makefile
+++ b/Makefile
@@ -40,7 +40,7 @@ style:
 .PHONY: style
 
 lint:
-	pylint --unsafe-load-any-extension=y -d all -e E,F src/linkahead/common
+	pylint --unsafe-load-any-extension=y -d all -e E,F src/linkahead
 .PHONY: lint
 
 mypy:
diff --git a/setup.py b/setup.py
index 75bcf0c762bc10aed857c741bcf4791f73bb461b..9047cbd0ff2d65a37a9a317229c6349d50a39968 100755
--- a/setup.py
+++ b/setup.py
@@ -48,7 +48,7 @@ from setuptools import find_packages, setup
 
 ISRELEASED = True
 MAJOR = 0
-MINOR = 17
+MINOR = 18
 MICRO = 0
 # Do not tag as pre-release until this commit
 # https://github.com/pypa/packaging/pull/515
diff --git a/src/doc/conf.py b/src/doc/conf.py
index 656006787e51949ccefbf213d0a80a74df556c75..46c1645c0117df75ec276c1328a423fb29e722fe 100644
--- a/src/doc/conf.py
+++ b/src/doc/conf.py
@@ -29,10 +29,10 @@ copyright = '2024, IndiScale GmbH'
 author = 'Daniel Hornung'
 
 # The short X.Y version
-version = '0.17.0'
+version = '0.18.0'
 # The full version, including alpha/beta/rc tags
 # release = '0.5.2-rc2'
-release = '0.17.0'
+release = '0.18.0'
 
 
 # -- General configuration ---------------------------------------------------
diff --git a/src/linkahead/__init__.py b/src/linkahead/__init__.py
index 567748e3b3a58fb73b91f652d82ed10f818d6014..9d5d754619112f94246451ef875c454a8fbf9f4f 100644
--- a/src/linkahead/__init__.py
+++ b/src/linkahead/__init__.py
@@ -42,7 +42,7 @@ from .common.datatype import (BOOLEAN, DATETIME, DOUBLE, FILE, INTEGER, LIST,
                               REFERENCE, TEXT)
 # Import of the basic  API classes:
 from .common.models import (ACL, ALL, FIX, NONE, OBLIGATORY, RECOMMENDED,
-                            SUGGESTED, Container, DropOffBox, Entity, File, Parent,
+                            SUGGESTED, Container, Entity, File, Parent,
                             Info, Message, Permissions, Property, Query,
                             QueryTemplate, Record, RecordType, delete,
                             execute_query, get_global_acl,
@@ -55,7 +55,7 @@ from .utils.get_entity import (get_entity_by_id, get_entity_by_name,
                                get_entity_by_path)
 
 try:
-    from .version import version as __version__
+    from .version import version as __version__  # pylint: disable=import-error
 except ModuleNotFoundError:
     version = "uninstalled"
     __version__ = version
diff --git a/src/linkahead/apiutils.py b/src/linkahead/apiutils.py
index b2a612faea1616c64b7e78575156abccfdb29e61..f0fe2719a80d6a14e994cc2485baf5b3e0a5a65b 100644
--- a/src/linkahead/apiutils.py
+++ b/src/linkahead/apiutils.py
@@ -291,9 +291,10 @@ def compare_entities(entity0: Optional[Entity] = None,
     if entity0 is entity1:
         return diff
 
+    # FIXME Why not simply return a diff which says that the types are different?
     if type(entity0) is not type(entity1):
-        raise ValueError(
-            "Comparison of different Entity types is not supported.")
+        diff[0]["type"] = type(entity0)
+        diff[1]["type"] = type(entity1)
 
     # compare special attributes
     for attr in SPECIAL_ATTRIBUTES:
diff --git a/src/linkahead/common/models.py b/src/linkahead/common/models.py
index 75b03b70907abdde50fb383c54a02e94cad115ad..8d1afbac2deb678f478632ab06b31dc6c706d10f 100644
--- a/src/linkahead/common/models.py
+++ b/src/linkahead/common/models.py
@@ -338,22 +338,13 @@ class Entity:
     def file(self, new_file):
         self.__file = new_file
 
-    @property
-    def pickup(self):
-        if self.__pickup is not None or self._wrapped_entity is None:
-            return self.__pickup
-
-        return self._wrapped_entity.pickup
-
-    @pickup.setter
-    def pickup(self, new_pickup):
-        self.__pickup = new_pickup
-
+    # FIXME Add test.
     @property   # getter for _cuid
     def cuid(self):
         # Set if None?
         return self._cuid
 
+    # FIXME Add test.
     @property   # getter for _flags
     def flags(self):
         return self._flags.copy()   # for dict[str, str] shallow copy is enough
@@ -505,6 +496,10 @@ class Entity:
 
         return self
 
+    def get_versionid(self):
+        """Returns the concatenation of ID and version"""
+        return str(self.id) + "@" + str(self.version.id)
+
     def get_importance(self, property):  # @ReservedAssignment
         """Get the importance of a given property regarding this entity."""
 
@@ -1734,8 +1729,8 @@ def _parse_value(datatype, value):
             return ret
 
     # This is for a special case, where the xml parser could not differentiate
-    # between single values and lists with one element. As
-    if hasattr(value, "__len__") and len(value) == 1:
+    # between single values and lists with one element.
+    if hasattr(value, "__len__") and not isinstance(value, str) and len(value) == 1:
         return _parse_value(datatype, value[0])
 
     # deal with references
@@ -2303,29 +2298,30 @@ class File(Record):
     """This class represents LinkAhead's file entities.
 
     For inserting a new file to the server, `path` gives the new location, and
-    (exactly?) one of `file` and `pickup` should (must?) be given to specify the
-    source of the file.
+    `file` specifies the source of the file.
 
     Symlinking from the "extroot" file system is not supported by this API yet,
     it can be done manually using the `InsertFilesInDir` flag.  For sample code,
     look at `test_files.py` in the Python integration tests of the
     `load_files.py` script in the advanced user tools.
 
-    @param name: A name for this file record (That's an entity name - not to be
-        confused with the last segment of the files path).
-    @param id: An ID.
-    @param description: A description for this file record.
-    @param path: The complete path, including the file name, of the file in the
-        server's "caosroot" file system.
-    @param file: A local path or python file object.  The file designated by
-        this argument will be uploaded to the server via HTTP.
-    @param pickup: A file/folder in the DropOffBox (the server will move that
-        file into its "caosroot" file system).
-    @param thumbnail: (Local) filename to a thumbnail for this file.
-    @param properties: A list of properties for this file record. @todo is this
-        implemented?
-    @param from_location: Deprecated, use `pickup` instead.
-
+    @param name
+        A name for this file *Record* (that is an entity name, not to be confused with the last
+        segment of the file's path).
+    @param id
+        An ID.
+    @param description
+        A description for this file record.
+    @param path
+        The complete path, including the file name, of the file in the server's "caosroot" file
+        system.
+    @param file
+        A local path or Python file object.  The file designated by this argument will be uploaded
+        to the server via HTTP.
+    @param thumbnail
+        (Local) filename to a thumbnail for this file.
+    @param properties
+        A list of properties for this file record. @todo is this implemented?
     """
 
     def __init__(
@@ -2335,9 +2331,7 @@ class File(Record):
         description: Optional[str] = None,  # @ReservedAssignment
         path: Optional[str] = None,
         file: Union[str, TextIO, None] = None,
-        pickup: Optional[str] = None,  # @ReservedAssignment
         thumbnail: Optional[str] = None,
-        from_location=None,
     ):
         Record.__init__(self, id=id, name=name, description=description)
         self.role = "File"
@@ -2350,15 +2344,6 @@ class File(Record):
         self.file = file
         self.thumbnail = thumbnail
 
-        self.pickup = pickup
-
-        if from_location is not None:
-            warn(DeprecationWarning(
-                "Param `from_location` is deprecated, use `pickup instead`."))
-
-        if self.pickup is None:
-            self.pickup = from_location
-
     def to_xml(
         self,
         xml: Optional[etree._Element] = None,
@@ -3353,6 +3338,11 @@ class Container(list):
         Returns
         -------
         xml_element : etree._Element
+
+        Note
+        ----
+        Calling this method has the side effect that all entities without an ID will be assigned a
+        negative integer ID.
         """
         tmpid = 0
 
@@ -3818,6 +3808,8 @@ class Container(list):
                             is_being_referenced.add(prop.value)
                         elif is_list_datatype(prop_dt):
                             for list_item in prop.value:
+                                if list_item is None:
+                                    continue
                                 if isinstance(list_item, int):
                                     is_being_referenced.add(list_item)
                                 else:
@@ -4163,8 +4155,6 @@ class Container(list):
 
             if hasattr(entity, '_upload') and entity._upload is not None:
                 entity_xml.set("upload", entity._upload)
-            elif hasattr(entity, 'pickup') and entity.pickup is not None:
-                entity_xml.set("pickup", entity.pickup)
 
             insert_xml.append(entity_xml)
 
@@ -4337,8 +4327,6 @@ class Container(list):
 
             if hasattr(entity, '_upload') and entity._upload is not None:
                 entity_xml.set("upload", entity._upload)
-            elif hasattr(entity, 'pickup') and entity.pickup is not None:
-                entity_xml.set("pickup", entity.pickup)
             insert_xml.append(entity_xml)
 
         if len(self) > 0 and len(insert_xml) < 1:
@@ -5187,42 +5175,6 @@ def execute_query(
                          cache=cache, page_length=page_length)
 
 
-class DropOffBox(list):
-    def __init__(self, *args, **kwargs):
-        warn(DeprecationWarning("The DropOffBox is deprecated and will be removed in future."))
-        super().__init__(*args, **kwargs)
-
-    path = None
-
-    def sync(self):
-        c = get_connection()
-        _log_request("GET: Info")
-        http_response = c.retrieve(["Info"])
-        body = http_response.read()
-        _log_response(body)
-
-        xml = etree.fromstring(body)
-
-        for child in xml:
-            if str(child.tag).lower() == "stats":
-                infoelem = child
-
-                break
-
-        for child in infoelem:
-            if str(child.tag).lower() == "dropoffbox":
-                dropoffboxelem = child
-
-                break
-        del self[:]
-        self.path = dropoffboxelem.get('path')
-
-        for f in dropoffboxelem:
-            self.append(f.get('path'))
-
-        return self
-
-
 class UserInfo():
     """User information from a server response.
 
@@ -5670,3 +5622,18 @@ def _filter_entity_list_by_identity(listobject: list[Entity],
             if pid_none and name_match:
                 matches.append(candidate)
     return matches
+
+
+def value_matches_versionid(value: Union[int, str]) -> bool:
+    """Returns True if the value matches the pattern <id>@<version>"""
+    if isinstance(value, int):
+        return False
+    if not isinstance(value, str):
+        raise ValueError(f"A reference value needs to be int or str. It was {type(value)}. "
+                         "Did you call value_matches_versionid on a non reference value?")
+    return "@" in value
+
+
+def get_id_from_versionid(versionid: str) -> str:
+    """Returns the ID part of a versionid with the pattern ``<id>@<version>``."""
+    return versionid.split("@")[0]
diff --git a/src/linkahead/connection/connection.py b/src/linkahead/connection/connection.py
index 74dd23177c548dd640c6dd1c03ce4069c366802b..fe99b421ee9d5bc3bc158af6b7f4882232db4d97 100644
--- a/src/linkahead/connection/connection.py
+++ b/src/linkahead/connection/connection.py
@@ -47,7 +47,7 @@ from ..exceptions import (ConfigurationError, HTTPClientError,
                           LoginFailedError)
 
 try:
-    from ..version import version
+    from ..version import version               # pylint: disable=import-error
 except ModuleNotFoundError:
     version = "uninstalled"
 
@@ -56,11 +56,12 @@ from .interface import CaosDBHTTPResponse, CaosDBServerConnection
 from .utils import make_uri_path, urlencode
 
 from typing import TYPE_CHECKING
+from .authentication.interface import CredentialsAuthenticator
 if TYPE_CHECKING:
     from typing import Optional, Any, Iterator, Union
     from requests.models import Response
-    from ssl import _SSLMethod
-    from .authentication.interface import AbstractAuthenticator, CredentialsAuthenticator
+    from ssl import _SSLMethod              # pylint: disable=no-name-in-module
+    from .authentication.interface import AbstractAuthenticator
 
 
 _LOGGER = logging.getLogger(__name__)
diff --git a/src/linkahead/connection/encode.py b/src/linkahead/connection/encode.py
index a76197803c9652e2d0c4e32819ee3e3f97758bfc..0cbb0b69f0a7b50244eb54c8dea7ef43ae713894 100644
--- a/src/linkahead/connection/encode.py
+++ b/src/linkahead/connection/encode.py
@@ -384,7 +384,7 @@ class MultipartYielder(object):
 
     # since python 3
     def __next__(self):
-        return self.next()
+        return self.next()                     # pylint: disable=not-callable
 
     def next(self):
         """generator function to yield multipart/form-data representation of
diff --git a/src/linkahead/exceptions.py b/src/linkahead/exceptions.py
index 7d4dc0850b811c0d696cc66252aa62541c6d3029..0904929cbc553464f60fdd6940a0941fac4fa2b7 100644
--- a/src/linkahead/exceptions.py
+++ b/src/linkahead/exceptions.py
@@ -190,7 +190,7 @@ class QueryNotUniqueError(BadQueryError):
 
 
 class EmptyUniqueQueryError(BadQueryError):
-    """A unique query or retrieve dound no result."""
+    """A unique query or retrieve found no result."""
 
 
 # ######################### Transaction errors #########################
diff --git a/src/linkahead/high_level_api.py b/src/linkahead/high_level_api.py
index 18d219c732672d16d0ab43e562cfe73d682614fe..45839d9b21295964175f9baebaab17d3bfc16f1e 100644
--- a/src/linkahead/high_level_api.py
+++ b/src/linkahead/high_level_api.py
@@ -4,9 +4,10 @@
 #
 # Copyright (C) 2018 Research Group Biomedical Physics,
 # Max-Planck-Institute for Dynamics and Self-Organization Göttingen
+# Copyright (C) 2020-2022,2025 IndiScale GmbH <info@indiscale.com>
 # Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com>
-# Copyright (C) 2020-2022 IndiScale GmbH <info@indiscale.com>
 # Copyright (C) 2022 Alexander Schlemmer <alexander.schlemmer@ds.mpg.de>
+# Copyright (C) 2025 Daniel Hornung <d.hornung@indiscale.com>
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as
@@ -21,16 +22,16 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
 #
-# ** end header
-#
+
 # type: ignore
 """
 A high level API for accessing LinkAhead entities from within python.
+This module is experimental, and may be changed or removed in the future.
 
 This is refactored from apiutils.
 """
 
-import warnings
+import logging
 from dataclasses import dataclass, fields
 from datetime import datetime
 from typing import Any, Dict, List, Optional, Union
@@ -44,7 +45,10 @@ from .common.datatype import (BOOLEAN, DATETIME, DOUBLE, FILE, INTEGER,
                               REFERENCE, TEXT, get_list_datatype,
                               is_list_datatype, is_reference)
 
-warnings.warn("""EXPERIMENTAL! The high_level_api module is experimental and may be changed or
+logger = logging.getLogger(__name__)
+
+
+logger.warning("""EXPERIMENTAL! The high_level_api module is experimental and may be changed or
 removed in the future. Its purpose is to give an impression on how the Python client user interface
 might be changed.""")
 
@@ -471,8 +475,7 @@ class CaosDBPythonEntity(object):
 
         if isinstance(att, list):
             return att
-        else:
-            return [att]
+        return [att]
 
     def add_parent(self, parent: Union[
             CaosDBPythonUnresolvedParent, "CaosDBPythonRecordType", str]):
@@ -679,53 +682,78 @@ class CaosDBPythonEntity(object):
 
         return entity
 
-    def serialize(self, without_metadata: bool = False, visited: dict = None):
-        """
-        Serialize necessary information into a dict.
+    def serialize(self, without_metadata: Optional[bool] = None, plain_json: bool = False,
+                  visited: Optional[dict] = None) -> dict:
+        """Serialize necessary information into a dict.
+
+        Parameters
+        ----------
+
+        without_metadata: bool, optional
+          If True don't set the metadata field in order to increase
+          readability. Not recommended if deserialization is needed.
+
+        plain_json: bool, optional
+          If True, serialize to a plain dict without any additional information besides the
+          property values, name and id.  This should conform to the format specified by the JSON
+          schema generated by the advanced user tools.  It also sets all properties as top-level
+          items of the resulting dict.  This implies ``without_metadata=True``.
+
+        Returns
+        -------
 
-        without_metadata: bool
-                          If True don't set the metadata field in order to increase
-                          readability. Not recommended if deserialization is needed.
+        out: dict
+          A dict corresponding to this entity.
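+
+        Examples
+        --------
+
+        A rough sketch of the plain JSON flavor (compare ``test_serialization`` in the unit
+        tests), converting a bare Record without properties:
+
+        >>> import linkahead as db
+        >>> from linkahead.high_level_api import convert_to_python_object
+        >>> obj = convert_to_python_object(db.Record(id=5, name="test"))
+        >>> obj.serialize(plain_json=True)
+        {'id': 5, 'name': 'test'}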
         """
+        if plain_json:
+            if without_metadata is None:
+                without_metadata = True
+            if not without_metadata:
+                raise ValueError("`plain_json` implies `without_metadata`.")
+        if without_metadata is None:
+            without_metadata = False
 
         if visited is None:
-            visited = dict()
+            visited = {}
 
         if self in visited:
             return visited[self]
 
-        metadata: Dict[str, Any] = dict()
-        properties = dict()
-        parents = list()
+        metadata: Dict[str, Any] = {}
+        properties = {}
+        parents = []
 
         # The full information to be returned:
-        fulldict = dict()
+        fulldict = {}
         visited[self] = fulldict
 
-        # Add CaosDB role:
-        fulldict["role"] = standard_type_for_high_level_type(self, True)
-
         for parent in self._parents:
             if isinstance(parent, CaosDBPythonEntity):
-                parents.append(parent.serialize(without_metadata, visited))
+                parents.append(parent.serialize(without_metadata=without_metadata,
+                                                plain_json=plain_json,
+                                                visited=visited))
             elif isinstance(parent, CaosDBPythonUnresolvedParent):
                 parents.append({"name": parent.name, "id": parent.id,
                                 "unresolved": True})
             else:
                 raise RuntimeError("Incompatible class used as parent.")
 
-        for baseprop in ("name", "id", "description", "version"):
-            val = self.__getattribute__(baseprop)
-            if val is not None:
-                fulldict[baseprop] = val
+        if not plain_json:
+            # Add LinkAhead role:
+            fulldict["role"] = standard_type_for_high_level_type(self, True)
+            for baseprop in ("name", "id", "description", "version"):
+                val = self.__getattribute__(baseprop)
+                if val is not None:
+                    fulldict[baseprop] = val
 
-        if type(self) == CaosDBPythonFile:
-            fulldict["file"] = self.file
-            fulldict["path"] = self.path
+            if isinstance(self, CaosDBPythonFile):
+                fulldict["file"] = self.file
+                fulldict["path"] = self.path
 
         for p in self.get_properties():
             m = self.get_property_metadata(p)
-            metadata[p] = dict()
+            metadata[p] = {}
             for f in fields(m):
                 val = m.__getattribute__(f.name)
                 if val is not None:
@@ -735,30 +763,37 @@ class CaosDBPythonEntity(object):
             if isinstance(val, CaosDBPythonUnresolvedReference):
                 properties[p] = {"id": val.id, "unresolved": True}
             elif isinstance(val, CaosDBPythonEntity):
-                properties[p] = val.serialize(without_metadata, visited)
+                properties[p] = val.serialize(without_metadata=without_metadata,
+                                              plain_json=plain_json,
+                                              visited=visited)
             elif isinstance(val, list):
                 serializedelements = []
                 for element in val:
                     if isinstance(element, CaosDBPythonUnresolvedReference):
-                        elm = dict()
+                        elm = {}
                         elm["id"] = element.id
                         elm["unresolved"] = True
                         serializedelements.append(elm)
                     elif isinstance(element, CaosDBPythonEntity):
                         serializedelements.append(
-                            element.serialize(without_metadata,
-                                              visited))
+                            element.serialize(without_metadata=without_metadata,
+                                              plain_json=plain_json,
+                                              visited=visited))
                     else:
                         serializedelements.append(element)
                 properties[p] = serializedelements
             else:
                 properties[p] = val
 
-        fulldict["properties"] = properties
-        fulldict["parents"] = parents
-
-        if not without_metadata:
-            fulldict["metadata"] = metadata
+        if plain_json:
+            fulldict["id"] = getattr(self, "id")
+            fulldict["name"] = getattr(self, "name")
+            fulldict.update(properties)
+        else:
+            fulldict["properties"] = properties
+            fulldict["parents"] = parents
+            if not without_metadata:
+                fulldict["metadata"] = metadata
         return fulldict
 
     def __str__(self):
@@ -939,26 +974,32 @@ def convert_to_entity(python_object):
 def convert_to_python_object(entity: Union[db.Container, db.Entity],
                              references: Optional[db.Container] = None,
                              visited: Optional[Dict[int,
-                                                    "CaosDBPythonEntity"]] = None):
+                                                    "CaosDBPythonEntity"]] = None,
+                             resolve_references: Optional[bool] = False,
+                             ):
     """
     Convert either a container of CaosDB entities or a single CaosDB entity
     into the high level representation.
 
-    The optional second parameter can be used
+    The optional ``references`` parameter can be used
     to resolve references that occur in the converted entities and resolve them
     to their correct representations. (Entities that are not found remain as
-    CaosDBPythonUnresolvedReferences.)
+    CaosDBPythonUnresolvedReferences, unless ``resolve_references`` is given and True.)
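+
+    A rough usage sketch, assuming ``container`` is a previously retrieved ``db.Container``
+    whose entities reference each other::
+
+        objects = convert_to_python_object(container, references=container,
+                                           resolve_references=True)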
     """
     if isinstance(entity, db.Container):
         # Create a list of objects:
-        return [convert_to_python_object(i, references, visited) for i in entity]
+        return [convert_to_python_object(ent, references=references, visited=visited,
+                                         resolve_references=resolve_references) for ent in entity]
 
     # TODO: recursion problems?
-    return _single_convert_to_python_object(
+    converted = _single_convert_to_python_object(
         high_level_type_for_standard_type(entity)(),
         entity,
         references,
         visited)
+    if resolve_references:
+        converted.resolve_references(True, references)
+    return converted
 
 
 def new_high_level_entity(entity: db.RecordType,
@@ -1042,8 +1083,6 @@ def query(query: str,
 
     """
     res = db.execute_query(query)
-    objects = convert_to_python_object(res)
-    if resolve_references:
-        for obj in objects:
-            obj.resolve_references(True, references)
+    objects = convert_to_python_object(res, references=references,
+                                       resolve_references=resolve_references)
     return objects
diff --git a/src/linkahead/utils/register_tests.py b/src/linkahead/utils/register_tests.py
index 6909544fed5a6f80572f60ba102c72b53568d897..66fd4553346075fc77aa7b1f6003d26d9967c223 100644
--- a/src/linkahead/utils/register_tests.py
+++ b/src/linkahead/utils/register_tests.py
@@ -18,44 +18,62 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-import linkahead as db
-from linkahead import administration as admin
-
-"""
-This module implements a registration procedure for integration tests which
+"""This module implements a registration procedure for integration tests which
 need a running LinkAhead instance.
 
-It ensures that tests do not accidentally overwrite data in real LinkAhead
-instances, as it checks whether the running LinkAhead instance is actually the
-correct one, that
-should be used for these tests.
-
-The test files have to define a global variable TEST_KEY which must be unique
-for each test using
+It ensures that tests do not accidentally overwrite data in real
+LinkAhead instances, as it checks whether the running LinkAhead
+instance is actually the correct one, that should be used for these
+tests.
 
-set_test_key("ABCDE")
+The test files have to define a global variable ``TEST_KEY`` which
+must be unique for each test using
+:py:meth:`~linkahead.utils.register_tests.set_test_key`.
 
 The test procedure (invoked by pytest) checks whether a registration
 information is stored in one of the server properties or otherwise
-- offers to register this test in the currently running database ONLY if this
-  is empty.
+
+- offers to register this test in the currently running database ONLY if this is
+  empty.
 - fails otherwise with a RuntimeError
 
-NOTE: you probably need to use pytest with the -s option to be able to
-      register the test interactively. Otherwise, the server property has to be
-      set before server start-up in the server.conf of the LinkAhead server.
+.. note::
+
+    You probably need to use pytest with the -s option to be able to
+    register the test interactively. Otherwise, the server property
+    has to be set before server start-up in the server.conf of the
+    LinkAhead server.
 
 This module is intended to be used with pytest.
 
-There is a pytest fixture "clear_database" that performs the above mentioned
-checks and clears the database in case of success.
+There is a pytest fixture
+:py:meth:`~linkahead.utils.register_tests.clear_database` that
+performs the above mentioned checks and clears the database in case of
+success.
+
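+A minimal sketch of how a test module might use this (the key value is only an example)::
+
+    from linkahead.utils.register_tests import clear_database, set_test_key
+
+    set_test_key("ABCDE")
+
+    def test_something(clear_database):
+        ...
+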
 """
 
+import linkahead as db
+from linkahead import administration as admin
+
 TEST_KEY = None
 
 
-def set_test_key(KEY):
+def set_test_key(KEY: str):
+    """Set the global ``TEST_KEY`` variable to `KEY`. Afterwards, if
+    `KEY` matches the ``_CAOSDB_INTEGRATION_TEST_SUITE_KEY`` server
+    environment variable, methods like :py:meth:`clear_database` can
+    be used. Call this function at the beginning of your test file.
+
+    Parameters
+    ----------
+    KEY : str
+        key with which the test using this function is registered and
+        which is checked against the
+        ``_CAOSDB_INTEGRATION_TEST_SUITE_KEY`` server environment
+        variable.
+
+    """
     global TEST_KEY
     TEST_KEY = KEY
 
@@ -122,10 +140,14 @@ try:
 
     @pytest.fixture
     def clear_database():
-        """Remove Records, RecordTypes, Properties, and Files ONLY IF the LinkAhead
-        server the current connection points to was registered with the appropriate key.
+        """Remove Records, RecordTypes, Properties, and Files ONLY IF
+        the LinkAhead server the current connection points to was
+        registered with the appropriate key using
+        :py:meth:`set_test_key`.
+
+        PyTestInfo Records and the corresponding RecordType and
+        Property are preserved.
 
-        PyTestInfo Records and the corresponding RecordType and Property are preserved.
         """
         _assure_test_is_registered()
         yield _clear_database()  # called before the test function
diff --git a/unittests/test_entity.py b/unittests/test_entity.py
index 855e5a39d53180d32a40de46bc7bb43d0bbd58bc..f2164d9680471e0ed52b47943f0108ef7e4ce60f 100644
--- a/unittests/test_entity.py
+++ b/unittests/test_entity.py
@@ -30,7 +30,9 @@ import linkahead
 from linkahead import (INTEGER, Entity, Parent, Property, Record, RecordType,
                        configure_connection)
 import warnings
-from linkahead.common.models import SPECIAL_ATTRIBUTES
+from linkahead.common.models import (SPECIAL_ATTRIBUTES, get_id_from_versionid,
+                                     value_matches_versionid)
+from linkahead.common.versioning import Version
 from linkahead.connection.mockup import MockUpServerConnection
 from lxml import etree
 from pytest import raises
@@ -295,3 +297,23 @@ def test_filter_by_identity():
         t.parents.filter(pid=234)
         assert issubclass(w[-1].category, DeprecationWarning)
         assert "This function was renamed" in str(w[-1].message)
+
+
+def test_value_matches_versionid():
+    assert value_matches_versionid(234) is False, "integer is no version id"
+    assert value_matches_versionid("234") is False, ("string that only contains an integer is no "
+                                                     "version id")
+    assert value_matches_versionid("234@bfe1a42cb37aae8ac625a757715d38814c274158") is True, (
+        "integer is no version id") is True
+    with raises(ValueError):
+        value_matches_versionid(234.0)
+
+
+def test_get_id_from_versionid():
+    assert get_id_from_versionid("234@bfe1a42cb37aae8ac625a757715d38814c274158") == "234"
+
+
+def test_get_versionid():
+    e = Entity(id=234)
+    e.version = Version(id="bfe1a42cb37aae8ac625a757715d38814c274158")
+    assert e.get_versionid() == "234@bfe1a42cb37aae8ac625a757715d38814c274158"
diff --git a/unittests/test_high_level_api.py b/unittests/test_high_level_api.py
index 82c1a5caf0f0719b5946ecd6749b4079bb6794bc..e35dc678f7d0f44d1bb8fa763cf8dfc8225e3aee 100644
--- a/unittests/test_high_level_api.py
+++ b/unittests/test_high_level_api.py
@@ -322,6 +322,7 @@ def test_wrong_entity_for_file():
 
 
 def test_serialization():
+    # With ID
     r = db.Record(id=5, name="test", description="ok")
     r.add_property(name="v", value=15, datatype=db.INTEGER, unit="kpx",
                    importance="RECOMMENDED")
@@ -333,6 +334,22 @@ def test_serialization():
     for teststr in teststrs:
         assert teststr in text
 
+    serialized = convert_to_python_object(r).serialize()
+    assert serialized == {'role': 'Record',
+                          'name': 'test',
+                          'id': 5,
+                          'description': 'ok',
+                          'properties': {'v': 15},
+                          'parents': [],
+                          'metadata': {'v': {'unit': 'kpx',
+                                             'datatype': 'INTEGER',
+                                             'importance': 'RECOMMENDED'}}}
+
+    serialized_plain = convert_to_python_object(r).serialize(plain_json=True)
+    assert serialized_plain == {'id': 5, 'name': 'test', 'v': 15}
+
+    # Without ID
+
     r = db.Record(description="ok")
     r.add_property(name="v", value=15, datatype=db.INTEGER, unit="kpx",
                    importance="RECOMMENDED")
@@ -341,6 +358,18 @@ def test_serialization():
     assert "name" not in text
     assert "id" not in text
 
+    serialized = convert_to_python_object(r).serialize()
+    assert serialized == {'role': 'Record',
+                          'description': 'ok',
+                          'properties': {'v': 15},
+                          'parents': [],
+                          'metadata': {'v': {'unit': 'kpx',
+                                             'datatype': 'INTEGER',
+                                             'importance': 'RECOMMENDED'}}}
+
+    serialized_plain = convert_to_python_object(r).serialize(plain_json=True)
+    assert serialized_plain == {'id': None, 'name': None, 'v': 15}
+
 
 def test_files():
     # empty file:
diff --git a/unittests/test_issues.py b/unittests/test_issues.py
index 3b0117b28c1300ea1eb0919fce02e3881c2ab025..ed125df9103c8f9c9a69fe8632265d3f38c377dc 100644
--- a/unittests/test_issues.py
+++ b/unittests/test_issues.py
@@ -93,6 +93,12 @@ def test_issue_128():
     assert prop_list.value == [now, now]
 
 
+def test_parse_datatype():
+    """No infinite recursion."""
+    from linkahead.common.models import _parse_value
+    assert 1 == _parse_value("labels0", "1")
+
+
 def test_issue_73():
     """
     Test to_xml infinite recursion handling with cross- and self-references.