From 273bb9500659892fa0a7ba5a283f257fe165cd4a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Henrik=20tom=20W=C3=B6rden?= <h.tomwoerden@indiscale.com> Date: Tue, 11 Apr 2023 11:32:08 +0200 Subject: [PATCH] MAINT: minor fixes --- integrationtests/test.sh | 2 ++ integrationtests/test_im_und_export.py | 3 ++- src/caosadvancedtools/cfood.py | 2 +- src/caosadvancedtools/crawler.py | 6 +++--- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/integrationtests/test.sh b/integrationtests/test.sh index 36730cc9..ab6ea545 100755 --- a/integrationtests/test.sh +++ b/integrationtests/test.sh @@ -58,6 +58,8 @@ echo "./crawl.py -a $RUN_ID /" set +e if grep "There where unauthorized changes" $OUT then + echo "There were still unauthorized changes which should not have happened!" + echo "Test FAILED!" exit 1 fi set -e diff --git a/integrationtests/test_im_und_export.py b/integrationtests/test_im_und_export.py index cd6782c2..3940ccce 100644 --- a/integrationtests/test_im_und_export.py +++ b/integrationtests/test_im_und_export.py @@ -14,7 +14,8 @@ if __name__ == "__main__": # delete everything print("Clearing database") recs = db.execute_query("FIND ENTITY entity with id>99") - recs.delete() + if len(recs) > 0: + recs.delete() assert 0 == len(db.execute_query("FIND File which is stored at " "**/poster.pdf")) print("Importing stored elements") diff --git a/src/caosadvancedtools/cfood.py b/src/caosadvancedtools/cfood.py index 4a9f955a..c0da4f01 100644 --- a/src/caosadvancedtools/cfood.py +++ b/src/caosadvancedtools/cfood.py @@ -807,7 +807,7 @@ class RowCFood(AbstractCFood): def update_identifiables(self): rec = self.identifiables[0] - for key, value in self.item.iteritems(): + for key, value in self.item.items(): if key in self.unique_cols: continue assure_property_is(rec, key, diff --git a/src/caosadvancedtools/crawler.py b/src/caosadvancedtools/crawler.py index 41c8871f..cbf5e98f 100644 --- a/src/caosadvancedtools/crawler.py +++ b/src/caosadvancedtools/crawler.py @@ -212,7 
+212,7 @@ class Crawler(object): new_cont.insert(unique=False) logger.info("Successfully inserted {} records!".format(len(new_cont))) all_inserts += len(new_cont) - logger.info("Finished with authorized updates.") + logger.info("Finished with authorized inserts.") changes = cache.get_updates(run_id) @@ -220,7 +220,7 @@ class Crawler(object): new_cont = db.Container() new_cont = new_cont.from_xml(new) ids = [] - tmp = [] + tmp = db.Container() update_incomplete = False # remove duplicate entities for el in new_cont: @@ -237,7 +237,7 @@ class Crawler(object): for ent in new_cont: remote_ent = db.Entity(id=ent.id).retrieve() if ent.version == remote_ent.version: - valids.append(ent) else: update_incomplete = True nonvalids.append(remote_ent) -- GitLab