Commit 4df3821f authored by Henrik tom Woerden

Integrationtest

parent 21ea9609
Experiment:
  obligatory_properties:
    date:
      datatype: DATETIME
      description: 'date of the experiment'
    identifier:
      datatype: TEXT
      description: 'identifier of the experiment'
    # TODO empty recommended_properties is a problem
    #recommended_properties:
    responsible:
Project:
Person:
  obligatory_properties:
    firstName:
      datatype: TEXT
      description: 'First name of a Person.'
    lastName:
      datatype: TEXT
      description: 'Last name of a Person.'
    responsible:
responsible:
  datatype: REFERENCE
results:
  datatype: REFERENCE
data:
  datatype: REFERENCE
scripts:
  datatype: REFERENCE
Source:
Simulation:
  obligatory_properties:
    date:
    identifier:
    responsible:
Analysis:
  obligatory_properties:
    date:
    identifier:
    responsible:
Publication:
Thesis:
  inherit_from_suggested:
    - Publication
Article:
  inherit_from_suggested:
    - Publication
Poster:
  inherit_from_suggested:
    - Publication
Presentation:
  inherit_from_suggested:
    - Publication
Report:
  inherit_from_suggested:
    - Publication
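For orientation, the Experiment entry of this model corresponds roughly to the following explicit pycaosdb calls. This is only an illustrative sketch; the integration test uses the yaml file itself (and, depending on the caosadvancedtools version, such a file can also be parsed and synchronized automatically), and it assumes a running CaosDB instance that the client is configured to talk to.

# Sketch: the "Experiment" part of model.yml written as explicit pycaosdb calls.
import caosdb as db

date = db.Property(name="date", datatype=db.DATETIME,
                   description="date of the experiment")
identifier = db.Property(name="identifier", datatype=db.TEXT,
                         description="identifier of the experiment")
responsible = db.Property(name="responsible", datatype=db.REFERENCE)

experiment = db.RecordType(name="Experiment")
experiment.add_property(name="date", importance=db.OBLIGATORY)
experiment.add_property(name="identifier", importance=db.OBLIGATORY)
experiment.add_property(name="responsible", importance=db.OBLIGATORY)

# insert the properties and the record type in one transaction
container = db.Container()
container.extend([date, identifier, responsible, experiment])
container.insert()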
./filldb.sh
py.test-3 test_crawler.py
#!/usr/bin/env python3
import os
import unittest

import caosdb as db


def get_entity_with_id(eid):
    return db.execute_query("FIND "+str(eid), unique=True)


class CrawlerTest(unittest.TestCase):
    def test_experiment(self):
        ########################
        # # first experiment # #
        ########################
        # TODO saving an empty string as the value of a text property makes
        # the property vanish; thus an 'x' is used here. Needs to be fixed.
        exp = db.execute_query(
            "FIND Experiment with date=2019-02-03 and identifier='x'",
            unique=True)

        # There should be a Project with name TestProject which is referenced
        project_id = exp.get_property("Project").value
        project = get_entity_with_id(project_id)
        assert project.name == "TestProject"
        assert "Project" in [p.name for p in project.get_parents()]

        # There should be a datafile attached as result with path datafile.dat
        datfile_id = exp.get_property("results").value[0]
        datfile = get_entity_with_id(datfile_id)
        assert os.path.basename(datfile.path) == "datafile.dat"

        #########################
        # # second experiment # #
        #########################
        exp = db.execute_query(
            "FIND Experiment with date=2019-02-03 and identifier='something'",
            unique=True)

        # Should be the same project
        assert project_id == exp.get_property("Project").value

        # Should have two data files
        datfile_ids = exp.get_property("results").value
        datafile_names = []

        for did in datfile_ids:
            datfile = get_entity_with_id(did)
            datafile_names.append(os.path.basename(datfile.path))
        assert "usefull.xlsx" in datafile_names
        assert "useless.xlsx" in datafile_names
    def test_analysis(self):
        ######################
        # # first analysis # #
        ######################
        ana = db.execute_query(
            "FIND Analysis with date=2019-02-03 and identifier='x'",
            unique=True)

        # There should be a Project with name TestProject which is referenced
        project_id = ana.get_property("Project").value
        project = get_entity_with_id(project_id)
        assert "Project" in [p.name for p in project.get_parents()]

        # There should be a file attached as result with path results.pdf
        datfile_id = ana.get_property("results").value[0]
        datfile = get_entity_with_id(datfile_id)
        assert os.path.basename(datfile.path) == "results.pdf"

        # There should be a file attached as script with path plot.py
        datfile_id = ana.get_property("scripts").value[0]
        datfile = get_entity_with_id(datfile_id)
        assert os.path.basename(datfile.path) == "plot.py"

        #######################
        # # second analysis # #
        #######################
        ana = db.execute_query(
            "FIND Analysis with date=2019-02-03 and identifier='something'",
            unique=True)

        # Should be the same project
        assert project_id == ana.get_property("Project").value

        # Should have two data files
        datfile_ids = ana.get_property("results").value
        datafile_names = []

        for did in datfile_ids:
            datfile = get_entity_with_id(did)
            datafile_names.append(os.path.basename(datfile.path))
        assert "lol1.png" in datafile_names
        assert "lol2.png" in datafile_names

        # There should be a file attached as script with path analyse.py
        datfile_id = ana.get_property("scripts").value[0]
        datfile = get_entity_with_id(datfile_id)
        assert os.path.basename(datfile.path) == "analyse.py"
    def test_publication(self):
        #########################
        # # first publication # #
        #########################
        pub = db.execute_query("FIND really_cool_finding", unique=True)

        # There should be a Project with name TestProject which is referenced

        ##########################
        # # second publication # #
        ##########################
        pub = db.execute_query("FIND paper_on_exciting_stuff ", unique=True)
@@ -31,9 +31,10 @@ from copy import deepcopy
 from datetime import datetime
 import caosdb as db
-from caosadvancedtools.cache import Cache
 from caosdb.exceptions import TransactionError
+from caosadvancedtools.cache import Cache
 ENTITIES = {}
@@ -107,8 +108,9 @@ class AbstractCFood(object):
         for key, identifiable in entities.items():
             if identifiable is None:
                 print("THIS IS STRANGE. No identifiables found in {}.".format(
-                    crawled_file})
+                    crawled_file))
                 continue
             existing = None
@@ -209,7 +211,7 @@ class AbstractCFood(object):
         # is using the unique keyword
         try:
             r = q.execute(unique=True)
-        except TransactionError as er:
+        except TransactionError:
             r = None
         if r is not None:
...
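The last hunk above only changes the exception handling around the uniqueness query. The pattern used there, looking for an existing entity with a unique query and treating a failing transaction as "no match", can be sketched roughly as follows. The helper name and the query string are hypothetical and only illustrate the pattern; db.execute_query and TransactionError come from the code above.

import caosdb as db
from caosdb.exceptions import TransactionError


def find_existing_person(first_name, last_name):
    """Return the matching Person record, or None if there is no match.

    Hypothetical helper illustrating the unique-query pattern; not part of
    the actual module.
    """
    query = ("FIND Person WITH firstName='{}' AND lastName='{}'"
             .format(first_name, last_name))
    try:
        # unique=True makes the query fail unless exactly one entity matches
        return db.execute_query(query, unique=True)
    except TransactionError:
        return None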
@@ -57,5 +57,3 @@ class Crawler(object):
         print("{} FILES TO BE PROCESSED.".format(len(files)))
         return files
@@ -57,6 +57,7 @@ def _clean_header(header):
 class NoValidHeader(Exception):
     pass
 def get_header(filename):
     """Open an md file identified by filename and read out the yaml
     header.
...
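The docstring of get_header above says it reads the yaml header of an md file. As a rough illustration of that idea only (not the module's actual implementation), such a front-matter header can be extracted like this:

import yaml


def read_yaml_header(filename):
    """Illustrative sketch: return the yaml front matter of a markdown file.

    Assumes the header sits at the top of the file, delimited by a line
    containing only '---' above and below it.
    """
    with open(filename) as fi:
        lines = fi.read().splitlines()

    if not lines or lines[0].strip() != "---":
        raise ValueError("{} has no valid yaml header".format(filename))

    end = lines[1:].index("---") + 1  # index of the closing delimiter
    return yaml.safe_load("\n".join(lines[1:end]))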
@@ -29,6 +29,7 @@ import argparse
 import caosdb as db
 from argparse import RawTextHelpFormatter
 def get_parser():
     parser = argparse.ArgumentParser(description=__doc__,
                                      formatter_class=RawTextHelpFormatter)
...
@@ -27,6 +27,7 @@ import caosdb as db
 from caosadvancedtools.cfood import AbstractCFood
 class ExampleCFood(AbstractCFood):
     def create_identifiables(self, crawled_file, match):
         print("create_identifiables")
...
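ExampleCFood above only prints a message in create_identifiables. Conceptually, this hook is where a CFood builds the records that identify what a crawled file belongs to. The following is a hypothetical sketch of such a body; the named regex groups and the Experiment record are invented for illustration, and how the created entities are handed back to AbstractCFood is not shown in this diff.

import caosdb as db


def create_identifiables(self, crawled_file, match):
    # 'match' is the regex match for the crawled file's path; 'date' and
    # 'identifier' are hypothetical named groups of that regex.
    experiment = db.Record()
    experiment.add_parent(name="Experiment")
    experiment.add_property(name="date", value=match.group("date"))
    experiment.add_property(name="identifier", value=match.group("identifier"))
    # hypothetical: the real hook may store these on the CFood instead
    return {"experiment": experiment}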
@@ -36,14 +36,14 @@ def main(folder, dry=True):
     if not os.path.exists(base_path) and not dry:
         os.system("mkdir -p "+base_path)
     for ii, ser in enumerate(["Series_{}".format(i) for i in range(3)]):
         series_path = os.path.join(base_path, ser)
-        print("Series:\n"+ser +"\n")
+        print("Series:\n"+ser + "\n")
         if not dry:
             os.mkdir(series_path)
         for date in [datetime.today()-timedelta(days=i)-timedelta(weeks=50*ii) for i in range(10)]:
             #import IPython
-            #IPython.embed()
+            # IPython.embed()
             exp_path = os.path.join(series_path, "Exp_"+str(date.date()))
             print("Exp: "+os.path.basename(exp_path))
             if not dry:
@@ -62,11 +62,6 @@ def main(folder, dry=True):
                 os.system("touch "+d_path)
 def get_parser():
     parser = argparse.ArgumentParser(description=__doc__,
                                      formatter_class=RawTextHelpFormatter)
...
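A side note on the hunk above: the script shells out with os.system("mkdir -p ...") and os.system("touch ..."). Should this ever be cleaned up, the standard-library equivalents avoid the subshell; a small sketch (the path is made up for illustration):

import os
from pathlib import Path

base_path = "extracted/ExperimentalData/TestProject"  # made-up example path

# equivalent of os.system("mkdir -p " + base_path)
os.makedirs(base_path, exist_ok=True)

# equivalent of os.system("touch " + d_path)
Path(os.path.join(base_path, "datafile.dat")).touch()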
@@ -39,6 +39,7 @@ class CFoodTest(unittest.TestCase):
     def test_check_existence(self):
         assert AbstractCFood.find_existing(self.exp) is None
 class CFoodTestExist(CFoodTest):
     def setUp(self):
         super().setUp()
@@ -46,7 +47,7 @@ class CFoodTestExist(CFoodTest):
     def test_check_existence(self):
         res = AbstractCFood.find_existing(self.exp)
         assert res.id == self.exp.id
     def tearDown(self):
         self.exp.delete()
@@ -82,6 +82,7 @@ class ToTsvTest(unittest.TestCase):
 class IntegrationTest(unittest.TestCase):
     """ converts tsv to a container and back and compares origin with
     result """
     def test_backandforth(self):
         cont = from_tsv(TEST_TABLE, "Measurement")
         tempfile = NamedTemporaryFile(delete=False)
...