Skip to content
Snippets Groups Projects
Commit c7c65d0d authored by florian's avatar florian
Browse files

STY: autopep'd

parent b0eed6d9
Branches
Tags
2 merge requests!53Release 0.1,!25F macros
Pipeline #28925 passed
......@@ -239,7 +239,8 @@ class Crawler(object):
elif len(crawler_definitions) == 2:
crawler_definition = crawler_definitions[1]
else:
raise RuntimeError("Crawler definition must not contain more than two documents.")
raise RuntimeError(
"Crawler definition must not contain more than two documents.")
# TODO: at this point this function can already load the cfood schema extensions
# from the crawler definition and add them to the yaml schema that will be
......@@ -443,7 +444,8 @@ class Crawler(object):
items = [items]
self.run_id = uuid.uuid1()
local_converters = Crawler.initialize_converters(crawler_definition, converter_registry)
local_converters = Crawler.initialize_converters(
crawler_definition, converter_registry)
# This recursive crawling procedure generates the update list:
self.target_data: List[db.Record] = []
self._crawl(items,
......@@ -525,7 +527,7 @@ class Crawler(object):
if (isinstance(p.value, list)):
for el in p.value:
if (isinstance(el, db.Entity) and el.id is None
and self.get_identified_record_from_local_cache(el) is None):
and self.get_identified_record_from_local_cache(el) is None):
return False
if (isinstance(p.value, db.Entity) and p.value.id is None
and self.get_identified_record_from_local_cache(p.value) is None):
......
0% Loading — or the page failed to load.
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment