From ec89a3a4452855db191cf4cfa5ff5fc08871e20a Mon Sep 17 00:00:00 2001
From: florian <f.spreckelsen@indiscale.com>
Date: Fri, 13 May 2022 17:37:57 +0200
Subject: [PATCH] MAINT: Remove commented debug code

---
 unittests/test_tool.py | 70 +++++++++++++++++++++++++++---------------
 1 file changed, 45 insertions(+), 25 deletions(-)

diff --git a/unittests/test_tool.py b/unittests/test_tool.py
index 404d04f1..824764ab 100755
--- a/unittests/test_tool.py
+++ b/unittests/test_tool.py
@@ -74,8 +74,10 @@ def test_record_structure_generation(crawler):
     subd = crawler.debug_tree[dircheckstr("DataAnalysis")]
     subc = crawler.debug_metadata["copied"][dircheckstr("DataAnalysis")]
     assert len(subd) == 2
-    assert len(subd[0]) == 2  # variables store on Data Analysis node of debug tree
-    assert len(subd[1]) == 0  # record store on Data Analysis node of debug tree
+    # variables store on Data Analysis node of debug tree
+    assert len(subd[0]) == 2
+    # record store on Data Analysis node of debug tree
+    assert len(subd[1]) == 0
     assert len(subc) == 2
     assert len(subc[0]) == 2
     assert len(subc[1]) == 0
@@ -84,7 +86,8 @@
     assert subd[0]["DataAnalysis"] == "examples_article/DataAnalysis"
     assert subc[0]["DataAnalysis"] == False
 
-    subd = crawler.debug_tree[dircheckstr("DataAnalysis", "2020_climate-model-predict")]
+    subd = crawler.debug_tree[dircheckstr(
+        "DataAnalysis", "2020_climate-model-predict")]
     subc = crawler.debug_metadata["copied"][dircheckstr(
         "DataAnalysis", "2020_climate-model-predict")]
 
@@ -92,7 +95,8 @@
     assert len(subd[1]["Project"].get_parents()) == 1
     assert subd[1]["Project"].get_parents()[0].name == "Project"
     assert subd[1]["Project"].get_property("date").value == "2020"
-    assert subd[1]["Project"].get_property("identifier").value == "climate-model-predict"
+    assert subd[1]["Project"].get_property(
+        "identifier").value == "climate-model-predict"
 
     assert len(subd[0]) == 6
     assert subd[0]["date"] == "2020"
@@ -129,15 +133,19 @@
     assert len(subd[1]["Project"].get_parents()) == 1
     assert subd[1]["Project"].get_parents()[0].name == "Project"
     assert subd[1]["Project"].get_property("date").value == "2020"
-    assert subd[1]["Project"].get_property("identifier").value == "climate-model-predict"
+    assert subd[1]["Project"].get_property(
+        "identifier").value == "climate-model-predict"
 
     assert len(subd[1]["Measurement"].get_parents()) == 1
     assert subd[1]["Measurement"].get_parents()[0].name == "Measurement"
     assert subd[1]["Measurement"].get_property("date").value == "2020-02-08"
-    assert subd[1]["Measurement"].get_property("identifier").value == "prediction-errors"
+    assert subd[1]["Measurement"].get_property(
+        "identifier").value == "prediction-errors"
     assert subd[1]["Measurement"].get_property("project").value != "$Project"
-    assert subd[1]["Measurement"].get_property("project").value.__class__ == db.Record
-    assert subd[1]["Measurement"].get_property("project").value == subd[0]["Project"]
+    assert subd[1]["Measurement"].get_property(
+        "project").value.__class__ == db.Record
+    assert subd[1]["Measurement"].get_property(
+        "project").value == subd[0]["Project"]
 
     # Check the copy flags for the second level in the hierarchy:
     assert subc[1]["Project"] is True
@@ -176,9 +184,12 @@ def test_crawler_update_list(crawler, ident):
     # If the following assertions fail, that is a hint, that the test file records.xml has changed
     # and this needs to be updated:
    assert len(ident.get_records()) == 18
-    assert len([r for r in ident.get_records() if r.parents[0].name == "Person"]) == 5
-    assert len([r for r in ident.get_records() if r.parents[0].name == "Measurement"]) == 11
-    assert len([r for r in ident.get_records() if r.parents[0].name == "Project"]) == 2
+    assert len([r for r in ident.get_records()
+                if r.parents[0].name == "Person"]) == 5
+    assert len([r for r in ident.get_records()
+                if r.parents[0].name == "Measurement"]) == 11
+    assert len([r for r in ident.get_records()
+                if r.parents[0].name == "Project"]) == 2
 
     # The crawler contains lots of duplicates, because identifiables have not been resolved yet:
     assert len(ident.get_records()) != len(crawler.updateList)
@@ -194,8 +205,10 @@
 
     id_r0 = ident.get_identifiable(r_cur)
     assert r_cur.parents[0].name == id_r0.parents[0].name
-    assert r_cur.get_property("first_name").value == id_r0.get_property("first_name").value
-    assert r_cur.get_property("last_name").value == id_r0.get_property("last_name").value
+    assert r_cur.get_property(
+        "first_name").value == id_r0.get_property("first_name").value
+    assert r_cur.get_property(
+        "last_name").value == id_r0.get_property("last_name").value
     assert len(r_cur.parents) == 1
     assert len(id_r0.parents) == 1
     assert len(r_cur.properties) == 2
@@ -213,9 +226,11 @@
 
     id_r1 = ident.get_identifiable(r_cur)
     assert r_cur.parents[0].name == id_r1.parents[0].name
-    assert r_cur.get_property("identifier").value == id_r1.get_property("identifier").value
+    assert r_cur.get_property(
+        "identifier").value == id_r1.get_property("identifier").value
     assert r_cur.get_property("date").value == id_r1.get_property("date").value
-    assert r_cur.get_property("project").value == id_r1.get_property("project").value
+    assert r_cur.get_property(
+        "project").value == id_r1.get_property("project").value
     assert len(r_cur.parents) == 1
     assert len(id_r1.parents) == 1
     assert len(r_cur.properties) == 4
@@ -228,7 +243,8 @@
     assert idr_r1_test != idr_r0_test
 
     assert len(idr_r1.properties) == 4
-    assert r_cur.get_property("responsible").value == idr_r1.get_property("responsible").value
+    assert r_cur.get_property(
+        "responsible").value == idr_r1.get_property("responsible").value
     assert r_cur.description == idr_r1.description
 
     # test whether compare_entites function works in this context:
@@ -249,13 +265,11 @@
 
 def test_synchronization(crawler, ident):
     insl, updl = crawler.synchronize(commit_changes=False)
-    # breakpoint()
-    # ident.check_record(ident.get_records()[1], insl[0])
     assert len(insl) == 0
     assert len(updl) == 0
 
 
 def test_identifiable_adapter():
     query = IdentifiableAdapter.create_query_for_identifiable(
         db.Record().add_parent("Person")
         .add_property("first_name", value="A")
@@ -357,14 +371,17 @@ def test_split_into_inserts_and_updates_trivial(crawler):
 
 def test_split_into_inserts_and_updates_single(mock_retrieve):
     crawler = mock_retrieve
-    entlist = [db.Record(name="A").add_parent("C"), db.Record(name="B").add_parent("C")]
+    entlist = [db.Record(name="A").add_parent(
+        "C"), db.Record(name="B").add_parent("C")]
 
     assert crawler.get_identified_record_from_local_cache(entlist[0]) is None
     assert crawler.get_identified_record_from_local_cache(entlist[1]) is None
     assert crawler.can_be_checked_externally(entlist[0])
     assert crawler.can_be_checked_externally(entlist[1])
-    assert crawler.identifiableAdapter.retrieve_identified_record_for_record(entlist[0]).id == 1111
-    assert crawler.identifiableAdapter.retrieve_identified_record_for_record(entlist[1]) is None
+    assert crawler.identifiableAdapter.retrieve_identified_record_for_record(
+        entlist[0]).id == 1111
+    assert crawler.identifiableAdapter.retrieve_identified_record_for_record(
+        entlist[1]) is None
 
     insert, update = crawler.split_into_inserts_and_updates(deepcopy(entlist))
     assert len(insert) == 1
@@ -418,7 +435,8 @@ def test_split_into_inserts_and_updates_with_complex(mock_retrieve):
     #      ^
     #      |
     # F <- B <- G
-    a = db.Record(name="A").add_parent("C").add_property('d', 13).add_property('e', "lskdjlsfdj")
+    a = db.Record(name="A").add_parent("C").add_property(
+        'd', 13).add_property('e', "lskdjlsfdj")
     b = db.Record(name="B").add_parent("C")
     g = db.Record(name="G").add_parent("C")
     f = db.Record(name="F").add_parent("C")
@@ -459,7 +477,8 @@ def test_all_references_are_existing_already(crawler):
         base_mocked_lookup, known={"A": db.Record(name="A").add_parent("C"),
                                    "B": db.Record(name="B").add_parent("C")}))
 
-    assert crawler.all_references_are_existing_already(db.Record().add_property('a', 123))
+    assert crawler.all_references_are_existing_already(
+        db.Record().add_property('a', 123))
     assert crawler.all_references_are_existing_already(db.Record()
                                                        .add_property('a', db.Record(id=123)))
     assert crawler.all_references_are_existing_already(db.Record()
@@ -477,7 +496,8 @@
 
 
 def test_can_be_checked_externally(crawler):
-    assert crawler.can_be_checked_externally(db.Record().add_property('a', 123))
+    assert crawler.can_be_checked_externally(
+        db.Record().add_property('a', 123))
     assert crawler.can_be_checked_externally(db.Record()
                                              .add_property('a', db.Record(id=123)))
     assert crawler.can_be_checked_externally(db.Record()
-- 
GitLab