Compare revisions: caosdb/src/caosdb-advanced-user-tools
@@ -66,6 +66,82 @@ def separated(text):
return "-"*60 + "\n" + text
def apply_list_of_updates(to_be_updated, update_flags={},
update_cache=None, run_id=None):
"""Updates the `to_be_updated` Container, i.e., pushes the changes to CaosDB
after removing possible duplicates. If a chace is provided, uauthorized
updates can be cached for further authorization.
Parameters:
-----------
to_be_updated : db.Container
Container with the entities that will be updated.
update_flags : dict, optional
Dictionary of CaosDB server flags that will be used for the
update. Default is an empty dict.
update_cache : UpdateCache or None, optional
Cache in which the intended updates will be stored so they can be
authorized afterwards. Default is None.
run_id : String or None, optional
Id with which the pending updates are cached. Only meaningful if
`update_cache` is provided. Default is None.
"""
if len(to_be_updated) == 0:
return
get_ids_for_entities_with_names(to_be_updated)
# remove duplicates
tmp = db.Container()
for el in to_be_updated:
if el not in tmp:
tmp.append(el)
to_be_updated = tmp
info = "UPDATE: updating the following entities\n"
baseurl = db.configuration.get_config()["Connection"]["url"]
def make_clickable(txt, id):
return "<a href='{}/Entity/{}'>{}</a>".format(baseurl, id, txt)
for el in to_be_updated:
info += str("\t" + make_clickable(el.name, el.id)
if el.name is not None
else "\t" + make_clickable(str(el.id), el.id))
info += "\n"
logger.info(info)
logger.debug(to_be_updated)
try:
if len(to_be_updated) > 0:
logger.info(
"Updating {} Records...".format(
len(to_be_updated)))
guard.safe_update(to_be_updated, unique=False,
flags=update_flags)
except FileNotFoundError as e:
logger.info("Cannot access {}. However, it might be needed for"
" the correct execution".format(e.filename))
except ProhibitedException:
try:
update_cache.insert(to_be_updated, run_id)
except IntegrityError as e:
logger.warning(
"There were problems with the update of {}.".format(
to_be_updated),
extra={"identifier": str(to_be_updated),
"category": "update-cache"}
)
logger.debug(traceback.format_exc())
logger.debug(e)
except Exception as e:
DataModelProblems.evaluate_exception(e)
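A brief usage sketch of the new helper, not part of the diff; the entity id and the UpdateCache import path are assumptions. Updates that the permission guard prohibits are parked in the cache under the given run id so that they can be authorized later:

import uuid
import caosdb as db
from caosadvancedtools.cache import UpdateCache  # assumed import path

records = db.Container()
records.append(db.Record(id=2345))  # hypothetical, locally modified entity
apply_list_of_updates(records, update_flags={},
                      update_cache=UpdateCache(), run_id=uuid.uuid1())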
class Crawler(object):
def __init__(self, cfood_types, use_cache=False,
abort_on_exception=True, interactive=True, hideKnown=False,
@@ -318,7 +394,11 @@ class Crawler(object):
self._cached_find_or_insert_identifiables(cfood.identifiables)
cfood.update_identifiables()
self.push_identifiables_to_CaosDB(cfood)
apply_list_of_updates(
cfood.to_be_updated,
cfood.update_flags,
update_cache=self.update_cache,
run_id=self.run_id)
except FileNotFoundError as e:
logger.info("Cannot access {}. However, it might be needed for"
" the correct execution".format(e.filename))
@@ -516,64 +596,8 @@ carefully and if the changes are ok, click on the following link:
subject="Crawler Update",
body=text)
def push_identifiables_to_CaosDB(self, cfood):
"""
Updates the to_be_updated Container, i.e. pushes the changes to CaosDB
"""
if len(cfood.to_be_updated) == 0:
return
get_ids_for_entities_with_names(cfood.to_be_updated)
# remove duplicates
tmp = db.Container()
for el in cfood.to_be_updated:
if el not in tmp:
tmp.append(el)
cfood.to_be_updated = tmp
info = "UPDATE: updating the following entities\n"
baseurl = db.configuration.get_config()["Connection"]["url"]
for el in cfood.to_be_updated:
def make_clickable(txt, id):
return "<a href='{}/Entity/{}'>{}</a>".format(baseurl, id, txt)
info += str("\t" + make_clickable(el.name, el.id)
if el.name is not None
else "\t" + make_clickable(str(el.id), el.id))
info += "\n"
logger.info(info)
logger.debug(cfood.to_be_updated)
try:
if len(cfood.to_be_updated) > 0:
logger.info(
"Updating {} Records...".format(
len(cfood.to_be_updated)))
guard.safe_update(cfood.to_be_updated, unique=False,
flags=cfood.update_flags)
except FileNotFoundError as e:
logger.info("Cannot access {}. However, it might be needed for"
" the correct execution".format(e.filename))
except ProhibitedException:
try:
self.update_cache.insert(cfood.to_be_updated, self.run_id)
except IntegrityError as e:
logger.warning(
"There were problems with the update of {}.".format(
cfood.to_be_updated),
extra={"identifier": str(cfood.to_be_updated),
"category": "update-cache"}
)
logger.debug(traceback.format_exc())
logger.debug(e)
except Exception as e:
DataModelProblems.evaluate_exception(e)
# TODO remove static?
@staticmethod
def find_or_insert_identifiables(identifiables):
""" Sets the ids of identifiables (that do not have already an id from the
@@ -3,3 +3,4 @@ from .experiment_cfood import ExperimentCFood
from .publication_cfood import PublicationCFood
from .simulation_cfood import SimulationCFood
from .software_cfood import SoftwareCFood
from .result_table_cfood import ResultTableCFood
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (C) 2019 Henrik tom Wörden
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import re
import caosdb as db
import pandas as pd
from caosadvancedtools.cfood import (AbstractFileCFood, assure_has_description,
assure_has_parent, assure_has_property,
assure_object_is_in_list, get_entity)
from caosadvancedtools.read_md_header import get_header
from ..cfood import assure_property_is, fileguide
from .experiment_cfood import ExperimentCFood
from .generic_pattern import date_pattern, date_suffix_pattern, project_pattern
from .utils import parse_responsibles, reference_records_corresponding_to_files
from .withreadme import DATAMODEL as dm
from .withreadme import RESULTS, REVISIONOF, SCRIPTS, WithREADME, get_glob
# TODO similarities with TableCrawler
class ResultTableCFood(AbstractFileCFood):
# win_paths can be used to define fields that will contain windows style
# path instead of the default unix ones. Possible fields are:
# ["results", "revisionOf"]
win_paths = []
table_re = r"result_table_(?P<recordtype>.*).csv$"
property_name_re = re.compile(r"^(?P<pname>.+?)\s*(\[\s?(?P<unit>.*?)\s?\] *)?$")
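# For example (cf. the unit tests below and the test.csv fixture):
#   "result_table_Hallo.csv"  -> recordtype "Hallo"
#   "temperature [°C]"        -> pname "temperature", unit "°C"
#   "depth"                   -> pname "depth", no unit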
@staticmethod
def name_beautifier(x): return x
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.table = pd.read_csv(fileguide.access(self.crawled_path))
@staticmethod
def get_re():
return (".*/ExperimentalData/"+project_pattern + date_pattern +
date_suffix_pattern + ResultTableCFood.table_re)
def create_identifiables(self):
self.recs = []
self.experiment, self.project = (
ExperimentCFood.create_identifiable_experiment(self.match))
for idx, row in self.table.iterrows():
rec = db.Record()
rec.add_parent(self.match.group("recordtype"))
for col in self.table.columns[:2]:
match = re.match(ResultTableCFood.property_name_re, col)
if match.group("unit"):
rec.add_property(match.group("pname"), row.loc[col], unit=match.group("unit"))
else:
rec.add_property(match.group("pname"), row.loc[col])
self.identifiables.append(rec)
self.recs.append(rec)
self.identifiables.extend([self.project, self.experiment])
def update_identifiables(self):
for ii, (idx, row) in enumerate(self.table.iterrows()):
for col in row.index:
match = re.match(ResultTableCFood.property_name_re, col)
assure_property_is(self.recs[ii], match.group("pname"), row.loc[col], to_be_updated=self.to_be_updated)
assure_property_is(self.experiment, self.match.group("recordtype"),
self.recs, to_be_updated=self.to_be_updated,
datatype=db.LIST(self.match.group("recordtype")))
#!/usr/bin/env python3
# encoding: utf-8
#
# ** header v3.0
# This file is a part of the CaosDB Project.
#
# Copyright (C) 2021 Indiscale GmbH <info@indiscale.com>
# Copyright (C) 2021 Henrik tom Wörden <h.tomwoerden@indiscale.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ** end header
#
"""An example script that illustrates how scripts can be used in conjunction
with the generic_analysis module.
The data model needed for this script is:
Analysis:
sources: REFERENCE
scripts: FILE
results: REFERENCE
mean_value: DOUBLE
Person:
Email: TEXT
"""
import argparse
import logging
import sys
from argparse import RawTextHelpFormatter
from datetime import datetime
import caosdb as db
import matplotlib.pyplot as plt
import numpy as np
from caosadvancedtools.cfood import assure_property_is
from caosadvancedtools.crawler import apply_list_of_updates
from caosadvancedtools.guard import INSERT, UPDATE
from caosadvancedtools.guard import global_guard as guard
from caosadvancedtools.serverside.helper import send_mail as main_send_mail
# logging should be done like this in order to allow the caller script to
# direct the output.
logger = logging.getLogger(__name__)
# allow updates of existing entities
guard.set_level(level=UPDATE)
def send_mail(changes: [db.Entity], recipient: str):
""" calls sendmail in order to send a mail to the curator about pending
changes
Parameters:
-----------
changes: The CaosDB entities in the version after the update.
recipient: The person who shall receive the mail.
"""
caosdb_config = db.configuration.get_config()
text = """Dear Curator,
The following changes were done automatically.
{changes}
""".format(changes="\n".join(changes))
try:
fro = caosdb_config["advancedtools"]["automated_updates.from_mail"]
except KeyError:
logger.error("Server Configuration is missing a setting for "
"sending mails. The administrator should check "
"'from_mail'.")
return
main_send_mail(
from_addr=fro,
to=recipient,
subject="Automated Update",
body=text)
def main(args):
# auth_token is provided by the server side scripting API
# use this token for authentication when creating a new connection
if hasattr(args, "auth_token") and args.auth_token:
db.configure_connection(auth_token=args.auth_token)
logger.debug("Established connection")
try:
dataAnalysisRecord = db.Record(id=args.entityid).retrieve()
except db.TransactionError:
logger.error("Cannot retrieve Record with id={}".format(
args.entityid
))
return 1  # abort, nothing to analyse
# The script may require certain information to exist. Here, we expect that
# a sources Property exists that references a numpy file.
# Similarly an InputDataSet could be used.
if (dataAnalysisRecord.get_property("sources") is None
or not db.apiutils.is_reference(
dataAnalysisRecord.get_property("sources"))):
raise RuntimeError("sources Refenrence must exist.")
logger.debug("Found required data.")
# ####### this core might be replaced by a call to another script ####### #
# Download the data
source_val = dataAnalysisRecord.get_property("sources").value
npobj = db.File(
id=(source_val[0]
if isinstance(source_val, list)
else source_val)).retrieve()
npfile = npobj.download()
logger.debug("Downloaded data.")
data = np.load(npfile)
# Plot data
filename = "hist.png"
plt.hist(data)
plt.savefig(filename)
mean = data.mean()
# ####################################################################### #
# Insert the result plot
fig = db.File(file=filename,
path="/Analysis/results/"+str(datetime.now())+"/"+filename)
fig.insert()
# Add the mean value to the analysis Record
# If such a property existed before, it is changed if necessary. The old
# value will persist in the versioning of LinkAhead
to_be_updated = db.Container()
assure_property_is(
dataAnalysisRecord,
"mean_value",
mean,
to_be_updated=to_be_updated
)
# Add the file with the plot to the analysis Record
# If a file was already referenced, the new one will be referenced instead.
# The old file is being kept and is still referenced in an old version of
# the analysis Record.
assure_property_is(
dataAnalysisRecord,
"results",
[fig.id],
to_be_updated=to_be_updated
)
if len(to_be_updated) > 0:
print(to_be_updated)
apply_list_of_updates(to_be_updated, update_flags={})
logger.debug("Update sucessful.")
logger.info("The following Entities were changed:\n{}.".format(
[el.id for el in to_be_updated])
)
# Send mails to people that are referenced.
people = db.execute_query("FIND RECORD Person WHICH IS REFERENCED BY "
"{}".format(dataAnalysisRecord.id))
for person in people:
if person.get_property("Email") is not None:
send_mail([str(el) for el in to_be_updated],
recipient=person.get_property("Email").value)
logger.debug("Mails send.")
def parse_args():
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=RawTextHelpFormatter)
parser.add_argument("--auth-token",
help="Token provided by the server for authentication")
parser.add_argument("entityid",
help="The ID of the DataAnalysis Record.", type=int)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
sys.exit(main(args))
# encoding: utf-8
#
# Copyright (C) 2021 Alexander Schlemmer <alexander.schlemmer@ds.mpg.de>
# Copyright (C) 2021 IndiScale GmbH <info@indiscale.com>
# Copyright (C) 2021 Henrik tom Wörden <h.tomwoerden@indiscale.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
#
# See: https://gitlab.indiscale.com/caosdb/src/caosdb-advanced-user-tools/-/issues/55
# This source file is work in progress and currently untested.
"""
Variante I: Python module implementiert eine 'main' function, die einen Record
als Argument entgegennimmt und diesen um z.B. 'results' ergänzt und updated.
Variante II: Ein skript erhält eine ID als Argument (z.B. auf der command line)
und updated das Objekt selbstständig.
Idealfall: Idempotenz; I.e. es ist egal, ob das Skript schon aufgerufen wurde.
Ein weiterer Aufruf führt ggf. zu einem Update (aber nur bei Änderungen von
z.B. Parametern)
Das aufgerufene Skript kann beliebige Eigenschaften benutzen und erstellen.
ABER wenn die Standardeigenschaften (InputDataSet, etc) verwendet werden, kann
der Record leicht erzeugt werden.
"Analyze" "Perform Anlysis"
Knopf an Record Form im WebUI
im WebUI
| |
| |
v v
Winzskript, dass einen
DataAnalysis-Stub erzeugt
|
|
v
execute_script Routine --> AnalysisSkript
erhält den Stub und ggf. Nutzt Funktionen um Updates durchzuführen falls
den Pythonmodulenamen notwendig, Email
^
|
|
Cronjob findet outdated
DataAnalysis
Analyseskript macht update:
- flexibel welche Änderungen vorgenommen werden (z.B. mehrere Records)
- spezielle Funktionen sollten verwendet werden
- Logging und informieren muss im Skript passieren
- Skript kann mit subprocess aufgerufen werden (alternative unvollständige
DataAnalysis einfügen)
# Features
- Emailversand bei Insert oder Update
- Kurze Info: "Create XY Analysis" kann vmtl automatisch erzeugt werden
- Debug Info: müsste optional/bei Fehler zur Verfügung stehen.
- Skript/Software version sollte gespeichert werden
Outlook: the part of the called scripts that interact with LinkAhead might in
future be replaced by the Crawler. The working directory would be copied to the
file server and then crawled.
"""
import argparse
import importlib
import logging
import os
import sys
import caosdb as db
from caosdb.utils.server_side_scripting import run_server_side_script
logger = logging.getLogger(__name__)
def check_referenced_script(record: db.Record):
""" return the name of a referenced script
If the supplied record does not have an appropriate Property warings are
logged.
"""
if record.get_property("scripts") is None:
logger.warning("The follwing changed Record is missing the 'scripts' "
"Property:\n{}".format(str(record)))
return
script_prop = record.get_property("scripts")
if not db.apiutils.is_reference(script_prop):
logger.warning("The 'scripts' Property of the following Record should "
"reference a File:\n{}".format(str(record)))
return
script = db.execute_query("FIND ENTITY WITH id={}".format(
script_prop.value[0] if isinstance(script_prop.value, list)
else script_prop.value), unique=True)
if (not isinstance(script, db.File)):
logger.warning("The 'scripts' Property of the Record {} should "
"reference a File. Entity {} is not a File".format(
record.id, script_prop.value))
return
script_name = os.path.basename(script.path)
return script_name
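Based on the unit tests added in this merge request, the helper reduces to the following hedged usage sketch (id 1234 stands for the referenced script File):

rec = db.Record().add_property("scripts", datatype=db.REFERENCE, value="1234")
check_referenced_script(rec)  # -> "script.py" if id 1234 is a File; otherwise
                              #    None, with a warning describing the problem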
def call_script(script_name: str, record_id: int):
ret = run_server_side_script(script_name, record_id)
if ret.code != 0:
logger.error("Script failed!")
logger.debug(ret.stdout)
logger.error(ret.stderr)
else:
logger.debug(ret.stdout)
logger.error(ret.stderr)
def run(dataAnalysisRecord: db.Record):
"""run a data analysis script.
There are two options:
1. A python script installed as a pip package.
2. A generic script that can be executed on the command line.
Using a python package:
It should be located in package plugin and implement at least
a main function that takes a DataAnalysisRecord as a single argument.
The script may perform changes to the Record and insert and update
Entities.
Using a generic script:
The only argument that is supplied to the script is the ID of the
dataAnalysisRecord. Apart from that different argument, everything said
about the Python package applies here as well.
"""
if dataAnalysisRecord.get_property("scripts") is not None:
script_name = check_referenced_script(dataAnalysisRecord)
logger.debug(
"Found 'scripts'. Call script '{}' in separate process".format(
script_name)
)
call_script(script_name, dataAnalysisRecord.id)
logger.debug(
"Script '{}' done.\n-----------------------------------".format(
script_name))
if dataAnalysisRecord.get_property("Software") is not None:
mod = dataAnalysisRecord.get_property("Software").value
logger.debug(
"Found 'Software'. Call '{}' as Python module".format(
mod)
)
m = importlib.import_module(mod)
m.main(dataAnalysisRecord)
logger.debug(
"'main' function of Python module '{}' done"
".\n-----------------------------------".format(mod))
def _parse_arguments():
""" Parses the command line arguments. """
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("--module", help="An id an input dataset.")
parser.add_argument("--inputset", help="An id an input dataset.")
parser.add_argument("--parameterset", help="An id of a parameter record.")
return parser.parse_args()
def main(args):
""" This is for testing only. """
dataAnalysisRecord = db.Record()
dataAnalysisRecord.add_property(name="InputDataSet", value=args.inputset)
dataAnalysisRecord.add_property(name="ParameterSet", value=args.parameterset)
dataAnalysisRecord.add_property(name="Software", value=args.module)
dataAnalysisRecord.insert()
run(dataAnalysisRecord)
if __name__ == "__main__":
args = _parse_arguments()
sys.exit(main(args))
# Parent of all datasets which are used as input to or output from
# analysis scripts
Dataset:
# Parent of all parametersets which are used as input for analysis scripts
ParameterSet:
DataAnalysis:
recommended_properties:
InputDataset:
datatype: Dataset
OutputDataset:
datatype: Dataset
ParameterSet:
date:
\ No newline at end of file
#!/usr/bin/env python3
# Sync data model for generic data analysis method
# A. Schlemmer, 09/2021
from caosadvancedtools.models import parser
model = parser.parse_model_from_yaml("model.yml")
model.sync_data_model()
@@ -156,6 +156,9 @@ def win_path_converter(val):
checks whether the value looks like a windows path and converts it to posix
"""
if val == "":
return val
if not check_win_path(val):
raise ValueError(
"Field should be a Windows path, but is\n'{}'.".format(val))
@@ -367,6 +370,8 @@ class TableImporter(object):
if len(self.unique_keys) > 0:
df = self.check_unique(df, filename=filename)
return df
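# Note: check_dataframe now returns the dataframe, since checks such as
# check_unique may drop offending rows; the importers below therefore keep the
# returned value instead of calling check_dataframe only for its side effects.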
class XLSImporter(TableImporter):
def read_file(self, filename, **kwargs):
@@ -405,7 +410,7 @@ class XLSImporter(TableImporter):
'category': "inconsistency"})
raise DataInconsistencyError(*e.args)
self.check_dataframe(df, filename)
df = self.check_dataframe(df, filename)
return df
@@ -422,7 +427,7 @@ class CSVImporter(TableImporter):
'category': "inconsistency"})
raise DataInconsistencyError(*ve.args)
self.check_dataframe(df, filename)
df = self.check_dataframe(df, filename)
return df
@@ -439,6 +444,6 @@ class TSVImporter(TableImporter):
'category': "inconsistency"})
raise DataInconsistencyError(*ve.args)
self.check_dataframe(df, filename)
df = self.check_dataframe(df, filename)
return df
temperature [°C] ,depth
234.4,3.0
344.6,5.1
#!/usr/bin/env python3
# encoding: utf-8
#
# ** header v3.0
# This file is a part of the CaosDB Project.
#
# Copyright (C) 2021 Indiscale GmbH <info@indiscale.com>
# Copyright (C) 2021 Henrik tom Wörden <h.tomwoerden@indiscale.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ** end header
#
"""
Unit tests for caosadvancedtools.serverside.generic_analysis.
"""
import caosdb as db
from caosadvancedtools.serverside.generic_analysis import \
check_referenced_script
from test_utils import BaseMockUpTest
class TestGAnalysisNoFile(BaseMockUpTest):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.entities = (
'<Response><Record name="script.py" path="/some/path/script.py'
'" id="1234"/><Query string="find record" results="1">'
'</Query></Response>')
def test_check_referenced_script(self):
# missing scripts
self.assertIsNone(check_referenced_script(db.Record()))
# wrong datatype
self.assertIsNone(check_referenced_script(db.Record().add_property(
"scripts", datatype=db.TEXT)))
# wrong value
self.assertIsNone(check_referenced_script(db.Record().add_property(
"scripts", datatype=db.REFERENCE, value="hallo")))
# no file
self.assertIsNone(check_referenced_script(db.Record().add_property(
"scripts", datatype=db.REFERENCE, value="1234")))
class TestGAnalysisFile(BaseMockUpTest):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.entities = (
'<Response><File name="script.py" path="/some/path/script.py'
'" id="1234"/><Query string="find record" results="1">'
'</Query></Response>')
def test_check_referenced_script(self):
# all correct
self.assertEqual(check_referenced_script(db.Record().add_property(
"scripts", datatype=db.REFERENCE, value="1234")), "script.py")
#!/usr/bin/env python3
# encoding: utf-8
#
# ** header v3.0
# This file is a part of the CaosDB Project.
#
# Copyright (C) 2018 Research Group Biomedical Physics,
# Max-Planck-Institute for Dynamics and Self-Organization Göttingen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ** end header
#
"""
test module for ResultTableCFood
"""
import os
import re
import unittest
import caosdb as db
from caosadvancedtools.scifolder.result_table_cfood import ResultTableCFood
class CFoodTest(unittest.TestCase):
def test_re(self):
self.assertIsNotNone(re.match(ResultTableCFood.table_re, "result_table_Hallo.csv"))
self.assertEqual(re.match(ResultTableCFood.table_re, "result_table_Hallo.csv").group("recordtype"),
"Hallo")
self.assertIsNotNone(re.match(ResultTableCFood.table_re,
"result_table_Cool RecordType.csv"))
self.assertEqual(re.match(ResultTableCFood.table_re, "result_table_Cool RecordType.csv").group("recordtype"),
"Cool RecordType")
self.assertIsNone(re.match(ResultTableCFood.table_re, "result_tableCool RecordType.csv"))
self.assertIsNotNone(re.match(ResultTableCFood.property_name_re,
"temperature [C]"))
self.assertEqual(re.match(ResultTableCFood.property_name_re,
"temperature [C]").group("pname"),
"temperature")
self.assertEqual(re.match(ResultTableCFood.property_name_re,
"temperature [C]").group("unit"), "C")
self.assertEqual(re.match(ResultTableCFood.property_name_re,
"temperature [ C ]").group("unit"), "C")
self.assertEqual(re.match(ResultTableCFood.property_name_re,
"temperature").group("pname"), "temperature")
def test_ident(self):
rtc = ResultTableCFood(os.path.join(os.path.dirname(__file__), "test.csv"))
rtc.match = re.match(ResultTableCFood.get_re(),
"/ExperimentalData/2010_TestProject/2019-02-03_something/result_table_RT.csv")
rtc.create_identifiables()
rtc.update_identifiables()