diff --git a/src/caoscrawler/converters.py b/src/caoscrawler/converters.py
index b9070c121133fa8cb406d89f425f4d9674a0b2cf..708a3c40c2b94062f5ab8fede47a884b8103fb16 100644
--- a/src/caoscrawler/converters.py
+++ b/src/caoscrawler/converters.py
@@ -684,6 +684,15 @@ class MarkdownFileConverter(SimpleFileConverter):
                 "Error during the validation (yaml header cannot be read) of the markdown file "
                 "located at the following node in the data structure:\n"
                 f"{path}")
+        except yaml_header_tools.ParseErrorsInHeader as err:
+            if generalStore is not None and self.name in generalStore:
+                path = generalStore[self.name]
+            else:
+                path = "<path not set>"
+            raise ConverterValidationError(
+                "Error during the validation (yaml header cannot be read) of the markdown file "
+                "located at the following node in the data structure:\n"
+                "{}\nError:\n{}".format(path, err))
         children: List[StructureElement] = []
 
         for name, entry in header.items():
@@ -692,8 +701,12 @@ class MarkdownFileConverter(SimpleFileConverter):
             elif type(entry) == str:
                 children.append(TextElement(name, entry))
             else:
+                if generalStore is not None and self.name in generalStore:
+                    path = generalStore[self.name]
+                else:
+                    path = "<path not set>"
                 raise RuntimeError(
-                    "Header entry {} has incompatible type.".format(name))
+                    "Header entry {} has incompatible type.\nFilename: {}".format(name, path))
         return children
 
 
diff --git a/src/caoscrawler/crawl.py b/src/caoscrawler/crawl.py
index 2aeb220cb3279c5bca367305f374218c4ce5c304..32ad2a34e4f63c1cf05ea1760eb434a06fffa7de 100644
--- a/src/caoscrawler/crawl.py
+++ b/src/caoscrawler/crawl.py
@@ -32,7 +32,6 @@ the acuired data with CaosDB.
 from __future__ import annotations
 
 import argparse
-import importlib
 import logging
 import os
 import sys
@@ -40,28 +39,24 @@ import traceback
 import uuid
 import warnings
 from argparse import RawTextHelpFormatter
-from collections import defaultdict
 from copy import deepcopy
 from datetime import datetime
 from enum import Enum
-from typing import Any, Optional, Type, Union
+from typing import Any, Optional, Union
 
 import caosdb as db
 import yaml
-from caosadvancedtools.cache import Cache, UpdateCache
+from caosadvancedtools.cache import UpdateCache
 from caosadvancedtools.crawler import Crawler as OldCrawler
 from caosadvancedtools.serverside.helper import send_mail
 from caosadvancedtools.utils import create_entity_link
 from caosdb.apiutils import (EntityMergeConflictError, compare_entities,
                              merge_entities)
 from caosdb.cached import cache_clear, cached_get_entity_by
-from caosdb.common.datatype import is_reference
 from caosdb.exceptions import EmptyUniqueQueryError
-from importlib_resources import files
-from jsonschema import validate
 
 from .config import get_config_setting
-from .converters import Converter, ConverterValidationError, DirectoryConverter
+from .converters import Converter, ConverterValidationError
 from .debug_tree import DebugTree
 from .identifiable import Identifiable
 from .identifiable_adapters import (CaosDBIdentifiableAdapter,
@@ -72,9 +67,8 @@ from .logging import configure_server_side_logging
 from .macros import defmacro_constructor, macro_constructor
 from .scanner import (create_converter_registry, initialize_converters,
                       load_definition, scan_directory, scan_structure_elements)
-from .stores import GeneralStore, RecordStore
-from .structure_elements import Directory, NoneElement, StructureElement
-from .version import check_cfood_version
+from .stores import GeneralStore
+from .structure_elements import StructureElement
 
 logger = logging.getLogger(__name__)
 
diff --git a/src/doc/macros.rst b/src/doc/macros.rst
index 5d8a411607af223c5b8d65b1553e710553d998f0..560827e6fc4ff8b0238f16ca8d76b2c682bce505 100644
--- a/src/doc/macros.rst
+++ b/src/doc/macros.rst
@@ -231,3 +231,43 @@ positions. Consider:
 
 However, this should not be a real limitation, as the crawler is designed in a way,
 that the order of the nodes in the same level should not matter.
+
+
+Using macros within macro definitions
+=====================================
+
+It is possible to use other macros in macro definitions. Again, examples can be found in
+the macro unit tests (see e.g. :func:`unittests.test_macros.test_macros_in_macros`):
+
+.. _example_macros_in_macros:
+.. code-block:: yaml
+
+  ---
+  metadata:
+    crawler-version: 0.3.1
+    macros:
+      - !defmacro
+        name: one_macro
+        params:
+          a: 25
+        definition:
+          macro_sub_$a:
+            b: $a
+            another_param: 3
+      - !defmacro
+        name: test_macrodef
+        params: {}
+        definition:
+          macro_top: !macro
+            one_macro:
+            - a: 17
+            - {}
+            - a: 98
+            not_macro:
+              a: 26
+  ---
+  extroot: !macro
+      test_macrodef:
+
+TODO:
+This section will be extended with further examples of macros used inside macro definitions.