From d2dc488d47598b227045b056d3d88e22e353bb05 Mon Sep 17 00:00:00 2001
From: Alexander Schlemmer <alexander@mail-schlemmer.de>
Date: Mon, 27 Jun 2022 12:24:32 +0200
Subject: [PATCH] MAINT: made type hints backwards compatible

---
 src/caoscrawler/crawl.py | 40 ++++++++++++++++++++--------------------
 1 file changed, 20 insertions(+), 20 deletions(-)

diff --git a/src/caoscrawler/crawl.py b/src/caoscrawler/crawl.py
index 40a8a026..acd91e88 100644
--- a/src/caoscrawler/crawl.py
+++ b/src/caoscrawler/crawl.py
@@ -44,7 +44,7 @@ from .identifiable_adapters import (IdentifiableAdapter,
                                      LocalStorageIdentifiableAdapter,
                                      CaosDBIdentifiableAdapter)
 from collections import defaultdict
-from typing import Union, Any, Optional, Type
+from typing import Union, Any, Optional, Type, List, Dict
 from caosdb.apiutils import compare_entities, merge_entities
 from copy import deepcopy
 from jsonschema import validate
@@ -158,7 +158,7 @@ class Crawler(object):
     storage for values (general store).
     """

-    def __init__(self, converters: list[Converter] = [],
+    def __init__(self, converters: List[Converter] = [],
                  generalStore: Optional[GeneralStore] = None,
                  debug: bool = False,
                  identifiableAdapter: IdentifiableAdapter = None):
@@ -196,8 +196,8 @@ class Crawler(object):
             # order in the tuple:
             # 0: generalStore
             # 1: recordStore
-            self.debug_tree: dict[str, tuple] = dict()
-            self.debug_metadata: dict[str, dict] = dict()
+            self.debug_tree: Dict[str, tuple] = dict()
+            self.debug_metadata: Dict[str, dict] = dict()
             self.debug_metadata["copied"] = dict()
             self.debug_metadata["provenance"] = defaultdict(lambda: dict())
             self.debug_metadata["usage"] = defaultdict(lambda: set())
@@ -269,7 +269,7 @@ class Crawler(object):
         """

         # Defaults for the converter registry:
-        converter_registry: dict[str, dict[str, str]] = {
+        converter_registry: Dict[str, Dict[str, str]] = {
             "Directory": {
                 "converter": "DirectoryConverter",
                 "package": "caoscrawler.converters"},
@@ -384,7 +384,7 @@ class Crawler(object):

         return local_converters

-    def start_crawling(self, items: Union[list[StructureElement], StructureElement],
+    def start_crawling(self, items: Union[List[StructureElement], StructureElement],
                        crawler_definition: dict,
                        converter_registry: dict,
                        file_path_prefix: str):
@@ -410,7 +410,7 @@ class Crawler(object):
             local_converters = Crawler.create_local_converters(crawler_definition,
                                                                converter_registry)
         # This recursive crawling procedure generates the update list:
-        self.updateList: list[db.Record] = []
+        self.updateList: List[db.Record] = []
         self._crawl(items,
                     self.global_converters, local_converters, self.generalStore,
                     self.recordStore,
@@ -457,7 +457,7 @@ class Crawler(object):
                     return False
         return True

-    def create_flat_list(self, ent_list: list[db.Entity], flat: list[db.Entity], visited=None):
+    def create_flat_list(self, ent_list: List[db.Entity], flat: List[db.Entity], visited=None):
         """
         Recursively adds all properties contained in entities from ent_list to
         the output list flat. Each element will only be added once to the list.
@@ -598,11 +598,11 @@ class Crawler(object):

         merge_entities(to, fro)

-    def split_into_inserts_and_updates(self, ent_list: list[db.Entity]):
+    def split_into_inserts_and_updates(self, ent_list: List[db.Entity]):
         if self.identifiableAdapter is None:
             raise RuntimeError("Should not happen.")
-        to_be_inserted: list[db.Entity] = []
-        to_be_updated: list[db.Entity] = []
+        to_be_inserted: List[db.Entity] = []
+        to_be_updated: List[db.Entity] = []
         flat = list(ent_list)
         # assure all entities are direct members TODO Can this be removed at some point?Check only?
         self.create_flat_list(ent_list, flat)
@@ -729,8 +729,8 @@ class Crawler(object):
                     el.value[index] = val.id

     @staticmethod
-    def remove_unnecessary_updates(updateList: list[db.Record],
-                                   identified_records: list[db.Record]):
+    def remove_unnecessary_updates(updateList: List[db.Record],
+                                   identified_records: List[db.Record]):
         """
         checks whether all relevant attributes (especially Property values) are equal

@@ -825,7 +825,7 @@ class Crawler(object):
                 traceback.print_exc()
                 breakpoint()

-    def _synchronize(self, updateList: list[db.Record], commit_changes: bool = True):
+    def _synchronize(self, updateList: List[db.Record], commit_changes: bool = True):
         """
         This function applies several stages:
         1) Retrieve identifiables for all records in updateList.
@@ -865,7 +865,7 @@

     @staticmethod
     def debug_build_usage_tree(converter: Converter):
-        res: dict[str, dict[str, Any]] = {
+        res: Dict[str, Dict[str, Any]] = {
             converter.name: {
                 "usage": ", ".join(converter.metadata["usage"]),
                 "subtree": {}
@@ -882,7 +882,7 @@
         return res

     def save_debug_data(self, filename: str):
-        paths: dict[str, Union[dict, list]] = dict()
+        paths: Dict[str, Union[Dict, List]] = dict()

         def flatten_debug_info(key):
             mod_info = self.debug_metadata[key]
@@ -907,12 +907,12 @@
         with open(filename, "w") as f:
             f.write(yaml.dump(paths, sort_keys=False))

-    def _crawl(self, items: list[StructureElement],
-               global_converters: list[Converter],
-               local_converters: list[Converter],
+    def _crawl(self, items: List[StructureElement],
+               global_converters: List[Converter],
+               local_converters: List[Converter],
                generalStore: GeneralStore, recordStore: RecordStore,
-               structure_elements_path: list[str], converters_path: list[str],
+               structure_elements_path: List[str], converters_path: List[str],
                file_path_prefix):
         """
         Crawl a list of StructureElements and apply any matching converters.
--
GitLab
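Note (not part of the patch above): the reason for replacing list[...] and dict[...] with
typing.List and typing.Dict is that built-in collection generics (PEP 585) are only
subscriptable at runtime from Python 3.9 onwards. On Python 3.7/3.8 a signature annotation
such as list[db.Record] raises "TypeError: 'type' object is not subscriptable" when it is
evaluated at function definition time, whereas typing.List/typing.Dict work on all supported
versions. A minimal, self-contained sketch of the backwards-compatible style; the function
below is purely illustrative and does not come from caoscrawler:

    # Runs unchanged on Python 3.7+.
    from typing import Dict, List

    def count_tags(tags: List[str]) -> Dict[str, int]:
        """Count how often each tag occurs (illustrative helper)."""
        counts: Dict[str, int] = {}
        for tag in tags:
            counts[tag] = counts.get(tag, 0) + 1
        return counts

    print(count_tags(["a", "b", "a"]))  # {'a': 2, 'b': 1}

On Python 3.9+ the built-in generics (tags: list[str] -> dict[str, int]) would work directly;
on 3.7/3.8 that spelling is only usable in annotations if their evaluation is deferred, e.g.
via "from __future__ import annotations" (PEP 563).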