diff --git a/src/caoscrawler/crawl.py b/src/caoscrawler/crawl.py
index 150a555124ee4266fb417e815ab149e5678a3474..f93c0ec436e4106c22f3bb065137394c54f12754 100644
--- a/src/caoscrawler/crawl.py
+++ b/src/caoscrawler/crawl.py
@@ -223,45 +223,42 @@ class Crawler(object):
         if generalStore is not None:
             warnings.warn(DeprecationWarning(
                 "The generalStore argument of the Crawler class is deprecated and has no effect."))
 
-
     def load_converters(self, definition: dict):
         warnings.warn(DeprecationWarning(
-            "The function load_converters in the crawl module is deprecated. "
+            "The function load_converters in the crawl module is deprecated. "
             "Please use create_converter_registry from the scanner module."))
 
         return create_converter_registry(definition)
 
     def load_definition(self, crawler_definition_path: str):
         warnings.warn(DeprecationWarning(
-            "The function load_definition in the crawl module is deprecated. "
+            "The function load_definition in the crawl module is deprecated. "
             "Please use load_definition from the scanner module."))
 
         return load_definition(crawler_definition_path)
 
     def initialize_converters(self, crawler_definition: dict, converter_registry: dict):
         warnings.warn(DeprecationWarning(
-            "The function initialize_converters in the crawl module is deprecated. "
+            "The function initialize_converters in the crawl module is deprecated. "
             "Please use initialize_converters from the scanner module."))
 
         return initialize_converters(crawler_definition, converter_registry)
-
+
     def generate_run_id(self):
         self.run_id = uuid.uuid1()
-
     def start_crawling(self, items: Union[list[StructureElement], StructureElement],
                        crawler_definition: dict,
                        converter_registry: dict,
                        restricted_path: Optional[list[str]] = None):
         warnings.warn(DeprecationWarning(
-            "The function start_crawling in the crawl module is deprecated. "
-            "Please use scan_structure_elements from the scanner module."))
-
+            "The function start_crawling in the crawl module is deprecated. "
+            "Please use scan_structure_elements from the scanner module."))
+
         self.generate_run_id()
-
+
         return scan_structure_elements(
             items, crawler_definition, converter_registry, restricted_path)
-
     def crawl_directory(self,
                         crawled_directory: str,
                         crawler_definition_path: str,
@@ -923,7 +920,6 @@ ____________________\n""".format(i + 1, len(pending_changes)) + str(el[3]))
 
         Save the information contained in a debug_tree to a file named filename.
         """
-
         paths: dict[str, Union[dict, list]] = dict()
 
         def flatten_debug_info(key):
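
The deprecation warnings above all point callers at the scanner module. A minimal migration sketch for code that still uses the deprecated `Crawler` methods is shown below; it assumes `caoscrawler.scanner` exports the three functions named in the warnings and that `Directory` lives in `caoscrawler.structure_elements`, while the cfood path and data directory are hypothetical placeholders:

```python
# Migration sketch: scanner-module API instead of the deprecated
# Crawler.load_definition / load_converters / start_crawling methods.
# Function names are taken from the deprecation messages in the diff;
# file paths are illustrative placeholders.
from caoscrawler.scanner import (
    load_definition,
    create_converter_registry,
    scan_structure_elements,
)
from caoscrawler.structure_elements import Directory

# Replaces Crawler.load_definition and Crawler.load_converters:
crawler_definition = load_definition("cfood.yml")
converter_registry = create_converter_registry(crawler_definition)

# Replaces Crawler.start_crawling; the argument order follows the
# call inside the deprecated wrapper shown in the diff above.
records = scan_structure_elements(
    Directory("data", "/path/to/data"),
    crawler_definition,
    converter_registry,
)
```

Note that the deprecated wrapper also called `self.generate_run_id()` before scanning; callers that depend on `Crawler.run_id` being set need to account for that themselves after switching to the scanner functions.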