diff --git a/src/caosdb/cached.py b/src/caosdb/cached.py
index ff885c115a04432229e6ae95217b2a8acaac7d14..4f735bb8e85ba7aa364b211e198840f9f6fb97e2 100644
--- a/src/caosdb/cached.py
+++ b/src/caosdb/cached.py
@@ -2,8 +2,9 @@
 #
 # This file is a part of the CaosDB Project.
 #
-# Copyright (C) 2023 Henrik tom Wörden <h.tomwoerden@indiscale.com>
 # Copyright (C) 2023 IndiScale GmbH <info@indiscale.com>
+# Copyright (C) 2023 Henrik tom Wörden <h.tomwoerden@indiscale.com>
+# Copyright (C) 2023 Daniel Hornung <d.hornung@indiscale.com>
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as
@@ -23,14 +24,14 @@
 This module provides some cached versions of functions that retrieve Entities from a remote server.
 """
 
-from typing import Union
+from enum import Enum
 from functools import lru_cache
+from typing import Union
+
 from .utils import get_entity
 from .common.models import execute_query, Entity, Container
 
 
-from enum import Enum
-
 # roughly 1GB for typical entity sizes
 DEFAULT_SIZE = 33333
 
@@ -40,7 +41,7 @@ _DUMMY_CACHE = {}
 
 class AccessType(Enum):
     """Different access types for cached queries.  Needed for filling the cache manually with
-:func:`fill_cache` .
+:func:`cache_fill` .
 
     """
     QUERY = 1
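
As a quick illustration of the renamed entry point, here is a minimal sketch of filling
the cache by hand (the entity ID and name are made up; the cache keys must match the
chosen ``AccessType``, e.g. entity IDs for ``AccessType.EID`` and query strings for
``AccessType.QUERY``):

.. code:: python

   import caosdb as db
   from caosdb.cached import AccessType, cache_fill, cached_get_entity_by

   # A locally constructed entity stands in for one retrieved earlier.
   ent = db.Entity(id=101, name="MyRecordType")

   # Keys are entity IDs because the cache is filled with AccessType.EID.
   cache_fill({101: ent}, kind=AccessType.EID, unique=True)

   # This is now answered from the cache, no server request is made.
   e1 = cached_get_entity_by(eid=101)
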
@@ -84,7 +85,7 @@ If a query phrase is given, the result must be unique.  If this is not what you
 
 
 def cached_query(query_string) -> Container:
-    """A cached version of the :func:`caosdb.execute_query<caosdb.common.models.execute_query>` function.
+    """A cached version of :func:`caosdb.execute_query<caosdb.common.models.execute_query>`.
 
 All additional arguments are at their default values.
 
@@ -120,21 +121,28 @@ def cache_clear() -> None:
 
 
 def cache_info():
-    """Empty the cache that is used by `cached_query` and `cached_get_entity_by`."""
+    """Return info about the cache that is used by `cached_query` and `cached_get_entity_by`.
+
+Returns
+-------
+
+out : named tuple
+  See the standard library :func:`functools.lru_cache` for details."""
     return _cached_access.cache_info()
 
 
 def cache_initialize(maxsize=DEFAULT_SIZE) -> None:
     """Create a new cache with the given size for `cached_query` and `cached_get_entity_by`.
 
-    The old cache is removed with all its content.
+    This implies a call to :func:`cache_clear`; the old cache is emptied first.
 
     """
+    cache_clear()
     global _cached_access
     _cached_access = lru_cache(maxsize=maxsize)(_cached_access.__wrapped__)
 
 
-def fill_cache(items: dict, kind: AccessType = AccessType.EID, unique: bool = True) -> None:
+def cache_fill(items: dict, kind: AccessType = AccessType.EID, unique: bool = True) -> None:
     """Add entries to the cache manually.
 
     This allows to fill the cache without actually submitting queries.  Note that this does not
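
A small sketch of the behavioural change in ``cache_initialize`` (no server needed; the
sizes are arbitrary):

.. code:: python

   from caosdb.cached import cache_info, cache_initialize

   # The shared cache is a functools.lru_cache, so cache_info() returns its
   # CacheInfo named tuple (hits, misses, maxsize, currsize).
   print(cache_info())

   # Re-create the shared cache with a custom size.  With this change the old
   # cache is emptied first (cache_initialize now calls cache_clear()), so any
   # previously cached entities or query results are gone.
   cache_initialize(maxsize=10)
   print(cache_info().maxsize)  # 10
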
diff --git a/src/doc/tutorials/caching.rst b/src/doc/tutorials/caching.rst
index ef4b55ad14aae029099565ba40860353dc9b1264..23a2be96d01224e28778f187b43e5ce3c79c125b 100644
--- a/src/doc/tutorials/caching.rst
+++ b/src/doc/tutorials/caching.rst
@@ -22,22 +22,22 @@ function, easily created from ``get_entity_by_name`` using Python's ``lru_cache`
    cached_get_by_name.cache_clear()
 
 For convenience, PyCaosDB provides the ``caosdb.cached`` module that defines the functions
-``cached_query`` and ``cached_get_by``, they use a shared cache. Let's have a look:
+``cached_query`` and ``cached_get_entity_by``, which use a shared cache. Let's have a look:
 
 .. code:: python
 
-   from caosdb.cached import cached_query, cached_get_by, cache_clear, cache_info, initialize_cache
-   rt1 = cached_get_by(name='RT1')
+   from caosdb.cached import cached_query, cached_get_entity_by, cache_clear, cache_info, cache_initialize
+   rt1 = cached_get_entity_by(name='RT1')
    qresult = cached_query('FIND Experiment WITH parameter=1')
    # you can inspect the cache
    print(cache_info())
    # this will not cause a server request since it is cached
-   rt1 = cached_get_by(name='RT1')
+   rt1 = cached_get_entity_by(name='RT1')
    # you can clear the cache with
    cache_clear()
    # If you want to have a cache with a custom size, you can initialize it (again). Old cached
    # data is lost.
-   initialize_cache(size=10)
+   cache_initialize(maxsize=10)
 
 
 If you want to manually add entities to the cache, you can do it yourself. This is useful when you
@@ -45,12 +45,12 @@ have entities on hand from previous queries that you want to add.
 
 .. code:: python
 
-   from caosdb.cached import fill_cache, AccessType
+   from caosdb.cached import cache_fill, AccessType
    # Here, items must be a dict with Entity IDs as keys and the Entities as values.
-   fill_cache(items, AccessType.EID, unique=True)
+   cache_fill(items, AccessType.EID, unique=True)
    # If you now use IDs that were in items, they are taken from the cache.
-   e1 = cached_get_by(eid=10001)
+   e1 = cached_get_entity_by(eid=10001)
 
-For the cached entity getter functions (``cached_get_by``) you need to set ``unique=True``.
+For the cached entity getter functions (``cached_get_entity_by``), you need to set ``unique=True``.
 
 
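
For completeness, a sketch of pre-filling the query cache as well, mirroring what the
updated unit test below does (the query string and the result Container are made up):

.. code:: python

   import caosdb as db
   from caosdb.cached import AccessType, cache_fill, cached_query

   # Query results are Containers, therefore unique=False here, in contrast to
   # the entity getters, which require unique=True.
   results = db.Container().extend([db.Entity(id=10001, name='RT1')])
   cache_fill({'FIND Experiment WITH parameter=1': results},
              AccessType.QUERY, unique=False)

   # Answered from the pre-filled cache, no server request is made.
   print(cached_query('FIND Experiment WITH parameter=1'))
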
diff --git a/unittests/test_cached.py b/unittests/test_cached.py
index d1f8606844833a3d21441fd6426f13d3e382607c..6e87c12a5265389d398bf18194ffb093f69be493 100644
--- a/unittests/test_cached.py
+++ b/unittests/test_cached.py
@@ -193,7 +193,7 @@ def test_cached_query(mocked_query):
     cached_query('stuff')
     assert mocked_query.call_count == 2
     # we fill the cache manually and make sure the element is used
-    fill_cache({'lol': db.Container().extend([db.Entity(id=10001, name='lol')])},
+    cache_fill({'lol': db.Container().extend([db.Entity(id=10001, name='lol')])},
                AccessType.QUERY, unique=False)
     # there are now two elements in the cache: a and lol
     assert cache_info().currsize == 2
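
As a sanity check of the renamed function in a test-like setting, a hypothetical
self-contained sketch (the query strings, the entity ID and the patch target are
assumptions and are not taken from the test module; ``execute_query`` is patched where
``caosdb.cached`` imports it):

.. code:: python

   from unittest.mock import patch

   import caosdb as db
   from caosdb.cached import (AccessType, cache_clear, cache_fill, cache_info,
                              cached_query)

   # Assumed patch target: the execute_query name as imported into caosdb.cached.
   with patch("caosdb.cached.execute_query", return_value=db.Container()) as mocked_query:
       cache_clear()
       cached_query('a')                  # first call goes through the mock
       cached_query('a')                  # second call is served from the cache
       assert mocked_query.call_count == 1

       cache_fill({'lol': db.Container().extend([db.Entity(id=10001, name='lol')])},
                  AccessType.QUERY, unique=False)
       assert cache_info().currsize == 2  # cached entries: 'a' and 'lol'

       cached_query('lol')                # pre-filled, still only one mock call
       assert mocked_query.call_count == 1
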