diff --git a/src/caoscrawler/crawl.py b/src/caoscrawler/crawl.py
index 66a81c77605cd42f772ad7aa7d73c1b02c702d55..bfc5f357725cb186db19c8d9f2e2485cd093886f 100644
--- a/src/caoscrawler/crawl.py
+++ b/src/caoscrawler/crawl.py
@@ -1141,6 +1141,7 @@ ____________________\n""".format(i + 1, len(pending_changes)) + str(el[3]))
         # This path_found variable stores wether the path given by restricted_path was found in the
         # data tree
         path_found = False
+        # at_least_one_match = False
         if restricted_path is not None and len(restricted_path) == 0:
             restricted_path = None
 
@@ -1152,6 +1153,7 @@ ____________________\n""".format(i + 1, len(pending_changes)) + str(el[3]))
                 if (converter.typecheck(element) and (
                         restricted_path is None or element.name == restricted_path[0])
                         and converter.match(element) is not None):
+                    # at_least_one_match = True
                     path_found = True
                     generalStore_copy = generalStore.create_scoped_copy()
                     recordStore_copy = recordStore.create_scoped_copy()
@@ -1195,10 +1197,14 @@ ____________________\n""".format(i + 1, len(pending_changes)) + str(el[3]))
                                 structure_elements_path + [element.get_name()],
                                 converters_path + [converter.name],
                                 restricted_path[1:] if restricted_path is not None else None)
+
 
         if restricted_path and not path_found:
             raise RuntimeError("A 'restricted_path' argument was given that is not contained in "
                                "the data tree")
+        # if not at_least_one_match:
+        #     run_ELSE_converter()
+
         # if the crawler is running out of scope, copy all records in
         # the recordStore, that were created in this scope
         # to the general update container.