diff --git a/integrationtests/test_base_table_exporter_integration.py b/integrationtests/test_base_table_exporter_integration.py
index 286c4ac3a6a6f11d5c308892615579ea60ea89db..0dbfc7e785be0949fb6c3c99a68225d0e5ea7c7a 100644
--- a/integrationtests/test_base_table_exporter_integration.py
+++ b/integrationtests/test_base_table_exporter_integration.py
@@ -82,7 +82,7 @@ def setup_module():
     """Clear all test entities"""
     try:
         db.execute_query("FIND ENTITY Test*").delete()
-    except BaseException:
+    except Exception:
         pass
 
 
diff --git a/integrationtests/test_cache.py b/integrationtests/test_cache.py
index 13470b8b0d523d1a30d3cf6895efabae3b245784..7724cfb4febd3a605419129f1250e9d2dee5e764 100644
--- a/integrationtests/test_cache.py
+++ b/integrationtests/test_cache.py
@@ -67,7 +67,7 @@ class CacheTest(unittest.TestCase):
         print(db.execute_query("FIND entity with id="+str(rec.id), unique=True))
         try:
             print(db.execute_query("FIND Record "+str(rec.id), unique=True))
-        except BaseException:
+        except Exception:
             print("Query does not work as expected")
         update.insert(cont, run_id)
         assert len(update.get_updates(run_id)) == 1
diff --git a/integrationtests/test_crawler_basics.py b/integrationtests/test_crawler_basics.py
index 04eb54599fa844e3b6b23295c809b4435bb465f4..67317f32981849f4786e3b6719297d6953dffd40 100644
--- a/integrationtests/test_crawler_basics.py
+++ b/integrationtests/test_crawler_basics.py
@@ -114,7 +114,7 @@ class CrawlerTest(unittest.TestCase):
         for el in [self.rec1, self.rec2, self.rec3]:
             try:
                 el.delete()
-            except BaseException:
+            except Exception:
                 pass
 
 
diff --git a/src/caosadvancedtools/crawler.py b/src/caosadvancedtools/crawler.py
index 4214fd9e8c0a36c568b6a323edac21c64f5752df..ad2536c2064336f244339b7f1f6c47d1b2841a9d 100644
--- a/src/caosadvancedtools/crawler.py
+++ b/src/caosadvancedtools/crawler.py
@@ -322,7 +322,7 @@ class Crawler(object):
             except Exception as e:
                 try:
                     DataModelProblems.evaluate_exception(e)
-                except BaseException:
+                except Exception:
                     pass
                 logger.debug("Failed during execution of {}!".format(
                     Cfood.__name__))
@@ -354,7 +354,7 @@ class Crawler(object):
             except Exception as e:
                 try:
                     DataModelProblems.evaluate_exception(e)
-                except BaseException:
+                except Exception:
                     pass
                 logger.debug("Failed during execution of {}!".format(
                     cfood.__name__))
diff --git a/src/caosadvancedtools/export_related.py b/src/caosadvancedtools/export_related.py
index d25381f9e4f35eabb3a17462d59ac62153d32b37..2114f38820f4b27bae2fa698005a94def5bef180 100755
--- a/src/caosadvancedtools/export_related.py
+++ b/src/caosadvancedtools/export_related.py
@@ -118,7 +118,7 @@ def export(cont, directory="."):
         try:
             el.download(target)
             print("Downloaded:", target)
-        except BaseException:
+        except Exception:
             print("Failed download of:", target)
 
     invert_ids(cont)
diff --git a/src/caosadvancedtools/table_export.py b/src/caosadvancedtools/table_export.py
index 9b821394f633c29e58c05df2ee2d08f84e693f50..78830b19a8f0274d4416a4a8faaa55de485cbc51 100644
--- a/src/caosadvancedtools/table_export.py
+++ b/src/caosadvancedtools/table_export.py
@@ -125,7 +125,7 @@ class BaseTableExporter(object):
             try:
                 with open(export_dict, encoding="utf-8") as tmp:
                     self.export_dict = json.load(tmp)
-            except BaseException:
+            except Exception:
                 raise ValueError(
                     "export_dict must be either a dictionary"
                     " or the path to a json file.")