Commit fa86a04e authored by I. Nüske

MNT: Ignore errors in unused files soon to be deprecated

parent 7c5928e0
2 merge requests: !128 "MNT: Added a warning when column metadata is not configured, and a better...", !126 "Fix pylint errors"
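
The diff below spans three files; almost every change is a trailing # pylint: disable=<check> comment plus a short module-level note. As an illustrative sketch only (the names below are made up and do not appear in the repository), such a trailing comment silences exactly the named check on the statement it annotates, while the rest of the module is still linted:

# Illustrative sketch only -- none of these names exist in the repository.

def match_stub(item, flags=None):  # pylint: disable=unused-argument
    """'flags' is required by the caller's interface but not used here."""
    return item is not None


try:
    raise ValueError("example failure")
except Exception as exc:  # pylint: disable=broad-exception-caught
    # Deliberately broad catch; the trailing comment silences only this line.
    print("handled:", exc)

The three-line module-level comments added in the diff only document the decision to ignore these warnings; the suppression itself happens line by line as above.
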
@@ -52,6 +52,11 @@ from linkahead.exceptions import (BadQueryError, EmptyUniqueQueryError,
 from .datamodel_problems import DataModelProblems
 from .guard import global_guard as guard
+# The pylint warnings triggered in this file are ignored, as this code is
+# assumed to be deprecated in the near future. Should this change, they need
+# to be reevaluated.
 ENTITIES = {}
 PROPERTIES = {}
 RECORDS = {}
@@ -183,7 +188,7 @@ class AbstractCFood(object, metaclass=ABCMeta):
     """
     @classmethod
-    def match_item(cls, item):
+    def match_item(cls, item):  # pylint: disable=unused-argument
         """ Matches an item found by the crawler against this class. Returns
         True if the item shall be treated by this class, i.e. if this class
         matches the item.
@@ -215,7 +220,7 @@ class AbstractCFood(object, metaclass=ABCMeta):
     # TODO looking for should `attach` the files itsself. This would allow to
     # group them right away and makes it unnecessary to check matches later
     # again.
-    def looking_for(self, item):
+    def looking_for(self, item):  # pylint: disable=unused-argument
         """
         returns True if item can be added to this CFood.
@@ -351,7 +356,7 @@ class AbstractFileCFood(AbstractCFood):
         raise NotImplementedError()
     @classmethod
-    def match_item(cls, path):
+    def match_item(cls, path):  # pylint: disable=arguments-renamed
         """ Matches the regular expression of this class against file names
         Parameters
@@ -365,7 +370,7 @@ class AbstractFileCFood(AbstractCFood):
     # TODO looking for should `attach` the files itsself. This would allow to
     # group them right away and makes it unnecessary to check matches later
     # again.
-    def looking_for(self, crawled_file):
+    def looking_for(self, crawled_file):  # pylint: disable=arguments-renamed
         """
         returns True if crawled_file can be added to this CFood.
@@ -744,7 +749,7 @@ def assure_has_property(entity, name, value, to_be_updated=None,
 def assure_property_is(entity, name, value, datatype=None, to_be_updated=None,
-                       force=False):
+                       force=False):  # pylint: disable=unused-argument
     """
     Checks whether `entity` has a Property `name` with the given value.
...
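
Aside, illustrative only (the class names below are made up): the arguments-renamed suppressions in this file cover overrides such as AbstractFileCFood.match_item(cls, path), which keeps the signature of AbstractCFood.match_item(cls, item) but renames the parameter. A minimal sketch of the pattern pylint flags:

# Illustrative sketch only; the class names are made up.

class BaseCheck:
    @classmethod
    def match_item(cls, item):
        """Return True if this class is responsible for `item`."""
        return item is not None


class FileCheck(BaseCheck):
    @classmethod
    def match_item(cls, path):  # pylint: disable=arguments-renamed
        # Same position, same meaning, but the parameter was renamed from
        # 'item' to 'path' -- exactly what arguments-renamed warns about.
        return str(path).endswith(".dat")


print(FileCheck.match_item("scan_001.dat"))  # True
print(BaseCheck.match_item(None))            # False

The rename is harmless for positional calls but would break keyword calls such as match_item(item=...), which is why pylint reports it instead of staying silent.
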
@@ -59,6 +59,11 @@ from .serverside.helper import send_mail as main_send_mail
 from .suppressKnown import SuppressKnown
 from .utils import create_entity_link
+# The pylint warnings triggered in this file are ignored, as this code is
+# assumed to be deprecated in the near future. Should this change, they need
+# to be reevaluated.
 logger = logging.getLogger(__name__)
@@ -133,7 +138,7 @@ def apply_list_of_updates(to_be_updated, update_flags=None,
         )
         logger.debug(traceback.format_exc())
         logger.debug(e)
-    except Exception as e:
+    except Exception as e:  # pylint: disable=broad-exception-caught
         DataModelProblems.evaluate_exception(e)
@@ -222,7 +227,7 @@ class Crawler(object):
         new_cont = db.Container.from_xml(new)
         ids = []
         tmp = db.Container()
-        update_incomplete = False
+        update_incomplete = False  # pylint: disable=unused-variable
         # remove duplicate entities
         for el in new_cont:
             if el.id not in ids:
@@ -231,13 +236,13 @@ class Crawler(object):
             else:
                 update_incomplete = True
         new_cont = tmp
-        if new_cont[0].version:
+        if new_cont[0].version:  # pylint: disable=no-member
             valids = db.Container()
             nonvalids = db.Container()
             for ent in new_cont:
                 remote_ent = db.Entity(id=ent.id).retrieve()
-                if ent.version == remote_ent.version:
+                if ent.version == remote_ent.version:  # pylint: disable=no-member
                     valids.append(ent)
                 else:
                     update_incomplete = True
@@ -319,10 +324,10 @@ class Crawler(object):
                 logger.debug(e)
             # TODO: Generally: in which cases should exceptions be raised? When is
             # errors_occured set to True? The expected behavior must be documented.
-            except Exception as e:
+            except Exception as e:  # pylint: disable=broad-exception-caught
                 try:
                     DataModelProblems.evaluate_exception(e)
-                except Exception:
+                except Exception:  # pylint: disable=broad-exception-caught
                     pass
                 logger.debug("Failed during execution of {}!".format(
                     Cfood.__name__))
@@ -351,10 +356,10 @@ class Crawler(object):
                 logger.info("Cannot access {}. However, it might be needed for"
                             " the correct execution".format(e.filename))
                 remove_cfoods.append(cfood)
-            except Exception as e:
+            except Exception as e:  # pylint: disable=broad-exception-caught
                 try:
                     DataModelProblems.evaluate_exception(e)
-                except Exception:
+                except Exception:  # pylint: disable=broad-exception-caught
                     pass
                 logger.debug("Failed during execution of {}!".format(
                     cfood.__name__))
@@ -444,10 +449,10 @@ class Crawler(object):
             except DataInconsistencyError as e:
                 logger.debug(traceback.format_exc())
                 logger.debug(e)
-            except Exception as e:
+            except Exception as e:  # pylint: disable=broad-exception-caught
                 try:
                     DataModelProblems.evaluate_exception(e)
-                except Exception:
+                except Exception:  # pylint: disable=broad-exception-caught
                     pass
                 logger.info("Failed during execution of {}!".format(
                     cfood.__class__.__name__))
@@ -682,7 +687,7 @@ carefully and if the changes are ok, click on the following link:
                 guard.safe_insert(missing, unique=False,
                                   flags={"force-missing-obligatory": "ignore"})
                 inserted.append(ent)
-            except Exception as e:
+            except Exception as e:  # pylint: disable=broad-exception-caught
                 DataModelProblems.evaluate_exception(e)
         if len(existing) > 0:
             info = "Identified the following existing entities:\n"
...
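
Aside, illustrative only (function and variable names are made up): the broad-exception-caught suppressions in this file protect a deliberate pattern in the crawler: an exception from one CFood is handed to DataModelProblems.evaluate_exception() so that the remaining CFoods still run. A minimal sketch of that catch-and-delegate idea, assuming nothing beyond the standard library:

# Illustrative sketch only; names are made up.

def run_all(tasks, record_problem):
    """Run every task; record failures instead of aborting the whole run."""
    for task in tasks:
        try:
            task()
        except Exception as exc:  # pylint: disable=broad-exception-caught
            # Intentionally broad: one failing task must not stop the others.
            record_problem(exc)


run_all([lambda: 1 / 0, lambda: None], record_problem=print)
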
@@ -25,6 +25,10 @@ from linkahead.common.utils import uuid
 from .cfood import (assure_has_description, assure_has_parent,
                     assure_property_is)
+# The pylint warnings triggered in this file are ignored, as this code is
+# assumed to be deprecated in the near future. Should this change, they need
+# to be reevaluated.
 class EntityMapping(object):
     """
@@ -42,6 +46,7 @@ class EntityMapping(object):
         if target._cuid is None:
             target._cuid = str(uuid())
         self.to_existing[str(target._cuid)] = existing
+        target._cuid = str(uuid())  # pylint: disable=protected-access
         self.to_target[existing.id] = target
...
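
Aside, illustrative only (the class below is made up): protected-access is reported whenever code reads or writes an underscore-prefixed attribute of another object, as with target._cuid above. A minimal sketch:

# Illustrative sketch only; the class is made up.

class Item:
    def __init__(self):
        self._cuid = None  # underscore prefix: "protected" by convention


def ensure_cuid(item, new_id):
    # Reading or writing another object's underscore attribute from outside
    # its class is what triggers pylint's protected-access check.
    if item._cuid is None:  # pylint: disable=protected-access
        item._cuid = new_id  # pylint: disable=protected-access
    return item


mapped = ensure_cuid(Item(), "abc-123")
print(mapped is not None)  # True
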