diff --git a/src/caoscrawler/crawl.py b/src/caoscrawler/crawl.py
index 6d26553bead151a0173202239af1ee51ef6b5fde..ad320c6f15e1a060ba41db0e301e7b7424e01ae1 100644
--- a/src/caoscrawler/crawl.py
+++ b/src/caoscrawler/crawl.py
@@ -1283,7 +1283,8 @@ def crawler_main(crawled_directory_path: str,
         whether or not to update or insert entities inspite of name conflicts
     restricted_path: optional, list of strings
             Traverse the data tree only along the given path. When the end of the given path
-            is reached, traverse the full tree as normal.
+            is reached, traverse the full tree as normal. See docstring of 'scanner' in
+            module 'scanner' for more details.
     remove_prefix : Optional[str]
         Remove the given prefix from file paths.
         See docstring of '_fix_file_paths' for more details.
diff --git a/src/caoscrawler/scanner.py b/src/caoscrawler/scanner.py
index c5e078c582a22477e4bddfdae3048bdbc1e0fe06..5bd662d3fb8efd77564066eae353a17c499d62e8 100644
--- a/src/caoscrawler/scanner.py
+++ b/src/caoscrawler/scanner.py
@@ -235,7 +235,7 @@ def scanner(items: list[StructureElement],
 
     restricted_path: optional, list of strings, traverse the data tree only along the given
                      path. For example, when a directory contains files a, b and c and b is
-                     given in restricted_path, a and c will be ignroed by the crawler.
+                     given as restricted_path, a and c will be ignored by the crawler.
                      When the end of the given path is reached, traverse the full tree as
                      normal. The first element of the list provided by restricted_path should
                      be the name of the StructureElement at this level, i.e. denoting the
@@ -357,7 +357,8 @@ def scan_directory(dirname: str, crawler_definition_path: str,
 
     restricted_path: optional, list of strings
             Traverse the data tree only along the given path. When the end of the given path
-            is reached, traverse the full tree as normal.
+            is reached, traverse the full tree as normal. See docstring of 'scanner' for
+            more details.
     """
 
     crawler_definition = load_definition(crawler_definition_path)
@@ -408,7 +409,8 @@ def scan_structure_elements(items: Union[list[StructureElement], StructureElemen
          file.
     restricted_path: optional, list of strings
          Traverse the data tree only along the given path. When the end of the given path
-         is reached, traverse the full tree as normal.
+         is reached, traverse the full tree as normal. See docstring of 'scanner' for
+         more details.
 
     Returns
     -------
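
The restricted_path behaviour documented above can be exercised roughly as follows. This is a minimal usage sketch, not part of the patch: the directory layout ("data" containing sub-directories a, b and c) and the cfood file name "cfood.yml" are illustrative only; scan_directory and its restricted_path keyword are the only names taken from the docstrings in this diff.

from caoscrawler.scanner import scan_directory

# "data" is assumed to contain the sub-directories a, b and c (illustrative
# names).  Restricting the scan to "b" means a and c are ignored; once the
# end of restricted_path is reached, the subtree below b is traversed as
# normal.  Depending on the crawler version, the first list element may have
# to name the top-level StructureElement (see the 'scanner' docstring).
result = scan_directory(
    "data",
    "cfood.yml",
    restricted_path=["b"],
)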