Skip to content
Snippets Groups Projects
Commit 221345d0 authored by florian's avatar florian
Browse files

STY: autopep8'd

parent 4c901109
No related branches found
No related tags found
2 merge requests!108Release 0.5.0,!104Create a new scanner module and move functions from crawl module there
Pipeline #34909 failed
......@@ -223,45 +223,42 @@ class Crawler(object):
if generalStore is not None:
warnings.warn(DeprecationWarning(
"The generalStore argument of the Crawler class is deprecated and has no effect."))
def load_converters(self, definition: dict):
    """Deprecated wrapper: build a converter registry from *definition*.

    Delegates to ``create_converter_registry`` from the scanner module and
    emits a ``DeprecationWarning``.

    Parameters
    ----------
    definition : dict
        The crawler definition from which the converter registry is built.

    Returns
    -------
    dict
        The converter registry produced by ``create_converter_registry``.
    """
    # Fixed: the deprecation message's first sentence was duplicated.
    warnings.warn(DeprecationWarning(
        "The function load_converters in the crawl module is deprecated. "
        "Please use create_converter_registry from the scanner module."))
    return create_converter_registry(definition)
def load_definition(self, crawler_definition_path: str):
    """Deprecated wrapper: load a crawler definition from a file path.

    Delegates to the module-level ``load_definition`` from the scanner
    module and emits a ``DeprecationWarning``.

    Parameters
    ----------
    crawler_definition_path : str
        Path to the crawler definition file.

    Returns
    -------
    dict
        The loaded crawler definition.
    """
    # Fixed: the deprecation message's first sentence was duplicated.
    warnings.warn(DeprecationWarning(
        "The function load_definition in the crawl module is deprecated. "
        "Please use load_definition from the scanner module."))
    return load_definition(crawler_definition_path)
def initialize_converters(self, crawler_definition: dict, converter_registry: dict):
    """Deprecated wrapper: initialize converters for a crawler definition.

    Delegates to the module-level ``initialize_converters`` from the scanner
    module and emits a ``DeprecationWarning``.

    Parameters
    ----------
    crawler_definition : dict
        The crawler definition whose converters are initialized.
    converter_registry : dict
        Registry mapping converter names to converter classes.

    Returns
    -------
    The value returned by the scanner module's ``initialize_converters``.
    """
    # Fixed: the deprecation message's first sentence was duplicated.
    warnings.warn(DeprecationWarning(
        "The function initialize_converters in the crawl module is deprecated. "
        "Please use initialize_converters from the scanner module."))
    return initialize_converters(crawler_definition, converter_registry)
def generate_run_id(self):
    """Assign a fresh ``uuid.uuid1()`` value to ``self.run_id``.

    NOTE(review): ``uuid1`` embeds host MAC address and timestamp; if the
    run id only needs uniqueness, ``uuid4`` would avoid leaking host info —
    confirm before changing, since callers may rely on the current format.
    """
    self.run_id = uuid.uuid1()
def start_crawling(self, items: Union[list[StructureElement], StructureElement],
                   crawler_definition: dict,
                   converter_registry: dict,
                   restricted_path: Optional[list[str]] = None):
    """Deprecated wrapper: scan the given structure elements.

    Generates a new run id, then delegates to ``scan_structure_elements``
    from the scanner module, emitting a ``DeprecationWarning``.

    Parameters
    ----------
    items : Union[list[StructureElement], StructureElement]
        The structure element(s) to scan.
    crawler_definition : dict
        The crawler definition to apply.
    converter_registry : dict
        Registry mapping converter names to converter classes.
    restricted_path : Optional[list[str]]
        Optional path restriction forwarded to the scanner.

    Returns
    -------
    The value returned by ``scan_structure_elements``.
    """
    # Fixed: the deprecation message lines were duplicated.
    warnings.warn(DeprecationWarning(
        "The function start_crawling in the crawl module is deprecated. "
        "Please use scan_structure_elements from the scanner module."))
    self.generate_run_id()
    # Bug fix: the original passed the undefined name ``restrict_path``,
    # which raised NameError; the parameter is ``restricted_path``.
    return scan_structure_elements(
        items, crawler_definition, converter_registry, restricted_path)
def crawl_directory(self,
crawled_directory: str,
crawler_definition_path: str,
......@@ -923,7 +920,6 @@ ____________________\n""".format(i + 1, len(pending_changes)) + str(el[3]))
Save the information contained in a debug_tree to a file named filename.
"""
paths: dict[str, Union[dict, list]] = dict()
def flatten_debug_info(key):
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment