Commit ec89a3a4 authored by florian

MAINT: Remove commented debug code

parent 6fd2df83
2 merge requests: !53 Release 0.1, !21 F use substitution templates
Pipeline #22937 passed with warnings
@@ -74,8 +74,10 @@ def test_record_structure_generation(crawler):
     subd = crawler.debug_tree[dircheckstr("DataAnalysis")]
     subc = crawler.debug_metadata["copied"][dircheckstr("DataAnalysis")]
     assert len(subd) == 2
-    assert len(subd[0]) == 2  # variables store on Data Analysis node of debug tree
-    assert len(subd[1]) == 0  # record store on Data Analysis node of debug tree
+    # variables store on Data Analysis node of debug tree
+    assert len(subd[0]) == 2
+    # record store on Data Analysis node of debug tree
+    assert len(subd[1]) == 0
     assert len(subc) == 2
     assert len(subc[0]) == 2
     assert len(subc[1]) == 0
@@ -84,7 +86,8 @@ def test_record_structure_generation(crawler):
     assert subd[0]["DataAnalysis"] == "examples_article/DataAnalysis"
     assert subc[0]["DataAnalysis"] == False
-    subd = crawler.debug_tree[dircheckstr("DataAnalysis", "2020_climate-model-predict")]
+    subd = crawler.debug_tree[dircheckstr(
+        "DataAnalysis", "2020_climate-model-predict")]
     subc = crawler.debug_metadata["copied"][dircheckstr(
         "DataAnalysis", "2020_climate-model-predict")]
@@ -92,7 +95,8 @@ def test_record_structure_generation(crawler):
     assert len(subd[1]["Project"].get_parents()) == 1
     assert subd[1]["Project"].get_parents()[0].name == "Project"
     assert subd[1]["Project"].get_property("date").value == "2020"
-    assert subd[1]["Project"].get_property("identifier").value == "climate-model-predict"
+    assert subd[1]["Project"].get_property(
+        "identifier").value == "climate-model-predict"
     assert len(subd[0]) == 6
     assert subd[0]["date"] == "2020"
@@ -129,15 +133,19 @@ def test_record_structure_generation(crawler):
     assert len(subd[1]["Project"].get_parents()) == 1
     assert subd[1]["Project"].get_parents()[0].name == "Project"
     assert subd[1]["Project"].get_property("date").value == "2020"
-    assert subd[1]["Project"].get_property("identifier").value == "climate-model-predict"
+    assert subd[1]["Project"].get_property(
+        "identifier").value == "climate-model-predict"
     assert len(subd[1]["Measurement"].get_parents()) == 1
     assert subd[1]["Measurement"].get_parents()[0].name == "Measurement"
     assert subd[1]["Measurement"].get_property("date").value == "2020-02-08"
-    assert subd[1]["Measurement"].get_property("identifier").value == "prediction-errors"
+    assert subd[1]["Measurement"].get_property(
+        "identifier").value == "prediction-errors"
     assert subd[1]["Measurement"].get_property("project").value != "$Project"
-    assert subd[1]["Measurement"].get_property("project").value.__class__ == db.Record
-    assert subd[1]["Measurement"].get_property("project").value == subd[0]["Project"]
+    assert subd[1]["Measurement"].get_property(
+        "project").value.__class__ == db.Record
+    assert subd[1]["Measurement"].get_property(
+        "project").value == subd[0]["Project"]
     # Check the copy flags for the second level in the hierarchy:
     assert subc[1]["Project"] is True
@@ -176,9 +184,12 @@ def test_crawler_update_list(crawler, ident):
     # If the following assertions fail, that is a hint, that the test file records.xml has changed
     # and this needs to be updated:
     assert len(ident.get_records()) == 18
-    assert len([r for r in ident.get_records() if r.parents[0].name == "Person"]) == 5
-    assert len([r for r in ident.get_records() if r.parents[0].name == "Measurement"]) == 11
-    assert len([r for r in ident.get_records() if r.parents[0].name == "Project"]) == 2
+    assert len([r for r in ident.get_records()
+                if r.parents[0].name == "Person"]) == 5
+    assert len([r for r in ident.get_records()
+                if r.parents[0].name == "Measurement"]) == 11
+    assert len([r for r in ident.get_records()
+                if r.parents[0].name == "Project"]) == 2
     # The crawler contains lots of duplicates, because identifiables have not been resolved yet:
     assert len(ident.get_records()) != len(crawler.updateList)
@@ -194,8 +205,10 @@ def test_crawler_update_list(crawler, ident):
     id_r0 = ident.get_identifiable(r_cur)
     assert r_cur.parents[0].name == id_r0.parents[0].name
-    assert r_cur.get_property("first_name").value == id_r0.get_property("first_name").value
-    assert r_cur.get_property("last_name").value == id_r0.get_property("last_name").value
+    assert r_cur.get_property(
+        "first_name").value == id_r0.get_property("first_name").value
+    assert r_cur.get_property(
+        "last_name").value == id_r0.get_property("last_name").value
     assert len(r_cur.parents) == 1
     assert len(id_r0.parents) == 1
     assert len(r_cur.properties) == 2
@@ -213,9 +226,11 @@ def test_crawler_update_list(crawler, ident):
     id_r1 = ident.get_identifiable(r_cur)
     assert r_cur.parents[0].name == id_r1.parents[0].name
-    assert r_cur.get_property("identifier").value == id_r1.get_property("identifier").value
+    assert r_cur.get_property(
+        "identifier").value == id_r1.get_property("identifier").value
     assert r_cur.get_property("date").value == id_r1.get_property("date").value
-    assert r_cur.get_property("project").value == id_r1.get_property("project").value
+    assert r_cur.get_property(
+        "project").value == id_r1.get_property("project").value
     assert len(r_cur.parents) == 1
     assert len(id_r1.parents) == 1
     assert len(r_cur.properties) == 4
@@ -228,7 +243,8 @@ def test_crawler_update_list(crawler, ident):
     assert idr_r1_test != idr_r0_test
     assert len(idr_r1.properties) == 4
-    assert r_cur.get_property("responsible").value == idr_r1.get_property("responsible").value
+    assert r_cur.get_property(
+        "responsible").value == idr_r1.get_property("responsible").value
     assert r_cur.description == idr_r1.description
     # test whether compare_entites function works in this context:
@@ -249,13 +265,11 @@ def test_crawler_update_list(crawler, ident):
 def test_synchronization(crawler, ident):
     insl, updl = crawler.synchronize(commit_changes=False)
-    # breakpoint()
-    # ident.check_record(ident.get_records()[1], insl[0])
     assert len(insl) == 0
     assert len(updl) == 0
-def test_identifiable_adapter():
+def test_identifiablxe_adapter():
     query = IdentifiableAdapter.create_query_for_identifiable(
         db.Record().add_parent("Person")
         .add_property("first_name", value="A")
@@ -357,14 +371,17 @@ def test_split_into_inserts_and_updates_trivial(crawler):
 def test_split_into_inserts_and_updates_single(mock_retrieve):
     crawler = mock_retrieve
-    entlist = [db.Record(name="A").add_parent("C"), db.Record(name="B").add_parent("C")]
+    entlist = [db.Record(name="A").add_parent(
+        "C"), db.Record(name="B").add_parent("C")]
     assert crawler.get_identified_record_from_local_cache(entlist[0]) is None
     assert crawler.get_identified_record_from_local_cache(entlist[1]) is None
     assert crawler.can_be_checked_externally(entlist[0])
     assert crawler.can_be_checked_externally(entlist[1])
-    assert crawler.identifiableAdapter.retrieve_identified_record_for_record(entlist[0]).id == 1111
-    assert crawler.identifiableAdapter.retrieve_identified_record_for_record(entlist[1]) is None
+    assert crawler.identifiableAdapter.retrieve_identified_record_for_record(
+        entlist[0]).id == 1111
+    assert crawler.identifiableAdapter.retrieve_identified_record_for_record(
+        entlist[1]) is None
     insert, update = crawler.split_into_inserts_and_updates(deepcopy(entlist))
     assert len(insert) == 1
@@ -418,7 +435,8 @@ def test_split_into_inserts_and_updates_with_complex(mock_retrieve):
     # ^
     # |
     # F <- B <- G
-    a = db.Record(name="A").add_parent("C").add_property('d', 13).add_property('e', "lskdjlsfdj")
+    a = db.Record(name="A").add_parent("C").add_property(
+        'd', 13).add_property('e', "lskdjlsfdj")
     b = db.Record(name="B").add_parent("C")
     g = db.Record(name="G").add_parent("C")
     f = db.Record(name="F").add_parent("C")
@@ -459,7 +477,8 @@ def test_all_references_are_existing_already(crawler):
         base_mocked_lookup, known={"A": db.Record(name="A").add_parent("C"),
                                    "B": db.Record(name="B").add_parent("C")}))
-    assert crawler.all_references_are_existing_already(db.Record().add_property('a', 123))
+    assert crawler.all_references_are_existing_already(
+        db.Record().add_property('a', 123))
     assert crawler.all_references_are_existing_already(db.Record()
                                                        .add_property('a', db.Record(id=123)))
     assert crawler.all_references_are_existing_already(db.Record()
@@ -477,7 +496,8 @@ def test_all_references_are_existing_already(crawler):
 def test_can_be_checked_externally(crawler):
-    assert crawler.can_be_checked_externally(db.Record().add_property('a', 123))
+    assert crawler.can_be_checked_externally(
+        db.Record().add_property('a', 123))
     assert crawler.can_be_checked_externally(db.Record()
                                              .add_property('a', db.Record(id=123)))
     assert crawler.can_be_checked_externally(db.Record()