Commit b767a8c1 authored by I. Nüske

MNT: Add 'from e' to all orphaned 'raise's

parent 65ca9fba
Part of 2 merge requests: !128 "MNT: Added a warning when column metadata is not configured, and a better...", !126 "Fix pylint errors"
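
For context: a raise inside an except block that does not name the caught exception as its cause is what the commit message calls an orphaned raise. Python then only records the implicit __context__, and pylint flags it as raise-missing-from (W0707), presumably the kind of warning behind merge request !126 ("Fix pylint errors"). Adding 'from e' sets the explicit __cause__ instead. A minimal sketch of both variants, with illustrative function names that are not taken from this repository:

    # Minimal sketch of the pattern this commit applies (illustrative names,
    # not from this repository).
    from datetime import datetime


    def parse_timestamp_orphaned(value):
        """Re-raise without chaining: the original ValueError is only kept as
        the implicit __context__ ("During handling of the above exception,
        another exception occurred")."""
        try:
            return datetime.fromisoformat(value)
        except ValueError:
            raise ValueError("invalid timestamp format in {}".format(value))


    def parse_timestamp_chained(value):
        """Re-raise with 'from e': the original ValueError becomes __cause__
        ("The above exception was the direct cause of the following
        exception"), which also satisfies pylint's raise-missing-from."""
        try:
            return datetime.fromisoformat(value)
        except ValueError as e:
            raise ValueError("invalid timestamp format in {}".format(value)) from e


    if __name__ == "__main__":
        try:
            parse_timestamp_chained("not-a-date")
        except ValueError as err:
            # The original exception stays available programmatically.
            print("cause:", repr(err.__cause__))
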
@@ -693,7 +693,7 @@ def assure_has_property(entity, name, value, to_be_updated=None,
         try:
             compare_time = datetime.fromisoformat(el.value)
-        except ValueError:
+        except ValueError as e:
             # special case of wrong iso format
             # time zone
             tmp = el.value.split("+")
@@ -711,7 +711,7 @@ def assure_has_property(entity, name, value, to_be_updated=None,
                     ms = '.' + tmp[1] + '0'*(6-len(tmp[1]))
                 else:
                     raise ValueError(
-                        "invalid millisecond format in {}".format(el.value))
+                        "invalid millisecond format in {}".format(el.value)) from e
             else:
                 ms = ""
             tmp = tmp[0] + ms + tz_str

@@ -141,7 +141,7 @@ it is not at the beginning, it must be preceded by a blank line.
     try:
         yaml_part = yaml.load("\n".join(headerlines), Loader=yaml.BaseLoader)
     except yaml.scanner.ScannerError as e:
-        raise ParseErrorsInHeader(filename, e)
+        raise ParseErrorsInHeader(filename, e) from e
     # except yaml.error.MarkedYAMLError as e:
     #     raise NoValidHeader(filename)
     if not isinstance(yaml_part, dict):

@@ -125,10 +125,10 @@ class BaseTableExporter(object):
             try:
                 with open(export_dict, encoding="utf-8") as tmp:
                     self.export_dict = json.load(tmp)
-            except Exception:
+            except Exception as e:
                 raise ValueError(
                     "export_dict must be either a dictionary"
-                    " or the path to a json file.")
+                    " or the path to a json file.") from e
         self.record = record
         self._check_sanity_of_export_dict()
         self.raise_error_if_missing = raise_error_if_missing
@@ -159,7 +159,7 @@ class BaseTableExporter(object):
                 logger.debug(exc)
                 errmssg = "Empty or invalid query '{}' for entry {}".format(
                     q, e)
-                raise TableExportError(errmssg)
+                raise TableExportError(errmssg) from exc
             if val is not None:
                 self.info[e] = val
@@ -189,7 +189,7 @@ class BaseTableExporter(object):
                 errmssg += ", nor does record {} have a property of that name".format(
                     self.record.id)
                 errmssg += "."
-                raise TableExportError(errmssg)
+                raise TableExportError(errmssg) from exc
         if self.missing:
             errmssg = "The following mandatory entries are missing:\n"

@@ -497,7 +497,7 @@ class XLSImporter(TableImporter):
                     str(e)),
                 extra={'identifier': str(filename),
                        'category': "inconsistency"})
-            raise DataInconsistencyError(*e.args)
+            raise DataInconsistencyError(*e.args) from e
         if len(xls_file.sheet_names) > 1:
             # Multiple sheets is the default now. Only show in debug
@@ -515,7 +515,7 @@ class XLSImporter(TableImporter):
                 "Cannot parse {}.\n{}".format(filename, e),
                 extra={'identifier': str(filename),
                        'category': "inconsistency"})
-            raise DataInconsistencyError(*e.args)
+            raise DataInconsistencyError(*e.args) from e
         df = self.check_dataframe(df, filename)
@@ -537,7 +537,7 @@ class CSVImporter(TableImporter):
                 "Cannot parse {}.\n{}".format(filename, ve),
                 extra={'identifier': str(filename),
                        'category': "inconsistency"})
-            raise DataInconsistencyError(*ve.args)
+            raise DataInconsistencyError(*ve.args) from ve
         except TypeError as te:
             # Iterate through the columns and rows to identify
             # problematic cells with wrong types.
@@ -577,7 +577,7 @@ class CSVImporter(TableImporter):
             for err in error_list:
                 msg += f" * column \"{err[0]}\": Expected \"{err[1]}\" but found \"{err[2]}\".\n"
             msg += '\n'
-            raise DataInconsistencyError(msg)
+            raise DataInconsistencyError(msg) from te
         df = self.check_dataframe(df, filename)
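
The importer hunks above share one wrap-and-chain shape: catch the parser's exception, log it, then re-raise a domain error built from the original exception's args. A condensed sketch of that shape, assuming pandas (the hunks reference sheet_names and dataframes) and using a stand-in DataInconsistencyError, since only the diff context is visible here:

    # Condensed sketch of the wrap-and-chain shape used in the importer hunks
    # (stand-in exception class; pandas assumed).
    import pandas as pd


    class DataInconsistencyError(Exception):
        """Stand-in for the project's DataInconsistencyError."""


    def read_csv_checked(filename):
        try:
            df = pd.read_csv(filename)
        except ValueError as ve:
            # Keep the original message(s) and chain the original exception,
            # mirroring 'raise DataInconsistencyError(*ve.args) from ve'.
            raise DataInconsistencyError(*ve.args) from ve
        return df


    # Usage (hypothetical path): read_csv_checked("measurements.csv")

Re-using *ve.args keeps the parser's wording in the domain error, while 'from ve' keeps the full traceback of the underlying failure attached as __cause__.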