Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Target project: caosdb/src/caosdb-advanced-user-tools
Commits on Source (5)
@@ -39,9 +39,11 @@ Optional h5-crawler:
    extroot. E.g. `sudo mount -o bind extroot
    ../../caosdb-deploy/profiles/empty/paths/extroot` (or whatever path
    the extroot of the empty profile to be used is located at).
-3. Start an empty (!) CaosDB instance (with the mounted extroot). The
-   database will be cleared during testing, so it's important to use
+3. Start (or restart) an empty (!) CaosDB instance (with the mounted extroot).
+   The database will be cleared during testing, so it's important to use
    an empty instance.
+   Make sure your configuration for the python caosdb module is correct and
+   allows connecting to the server.
 4. Run `test.sh`. Note that this may modify content of the `integrationtest/extroot/` directory.

 ## Code Formatting
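The pycaosdb configuration mentioned in step 3 can be verified before running `test.sh`; a minimal connectivity check, assuming the standard `pycaosdb.ini` setup (the query itself is only an example):

```python
import caosdb as db

# any cheap query will do; against the required empty instance this
# should simply print 0 instead of raising a connection error
print(db.execute_query("COUNT Record"))
```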
@@ -34,11 +34,11 @@ echo "Filling the database"
 echo "Testing the crawler database"
 python3 -m pytest test_crawler_with_cfoods.py
 echo "make a change"
-pushd extroot
+cd extroot
 egrep -liRZ 'A description of another example' . | xargs -0 -l sed -i -e 's/A description of another example/A description of this example/g'
 # remove a file to check that this does not lead to a crawler crash
 mv DataAnalysis/2010_TestProject/2019-02-03_something/README.xlsx DataAnalysis/2010_TestProject/2019-02-03_something/README.xlsx_back
-popd
+cd ..
 echo "run crawler"
 ./crawl.py / | tee $OUT
 # rename the moved file
@@ -58,9 +58,9 @@ then
 fi
 set -e
 echo "undo changes"
-pushd extroot
+cd extroot
 egrep -liRZ 'A description of this example' . | xargs -0 -l sed -i -e 's/A description of this example/A description of another example/g'
-popd
+cd ..
 python3 test_table.py
 # TODO the following test deletes lots of the data inserted by the crawler
 echo "Testing im and export"
@@ -3,15 +3,14 @@ import os
 from tempfile import TemporaryDirectory

 import caosdb as db
-from caosadvancedtools.export_related import export
+from caosadvancedtools.export_related import export_related_to
 from caosadvancedtools.import_from_xml import import_xml

 if __name__ == "__main__":
     print("Conducting im- and export tests")
     rec = db.execute_query("FIND 2019-02-03_really_cool_finding", unique=True)
     directory = TemporaryDirectory()
-    export(rec.id, directory=directory.name)
+    export_related_to(rec.id, directory=directory.name)
     # delete everything
     recs = db.execute_query("FIND entity with id>99")
     recs.delete()
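The renamed entry point is called the same way as before; a minimal sketch of the new call, assuming a record named `2019-02-03_really_cool_finding` exists and a working pycaosdb configuration:

```python
from tempfile import TemporaryDirectory

import caosdb as db
from caosadvancedtools.export_related import export_related_to

# look up the record whose related entities shall be exported
rec = db.execute_query("FIND 2019-02-03_really_cool_finding", unique=True)

# export the record, everything it references and the referenced files
with TemporaryDirectory() as tmpdir:
    export_related_to(rec.id, directory=tmpdir)
```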
@@ -26,14 +26,19 @@ import argparse
 import os

 import caosdb as db
+from caosdb.apiutils import retrieve_entities_with_ids
+from export_related import export


 def get_dm():
-    rts = set([r.name for r in db.execute_query("SELECT name FROM RECORDTYPE")])
+    rts = set([(r.id, r.name) for r
+               in db.execute_query("SELECT name FROM RECORDTYPE")])
     if None in rts:
         rts.remove(None)

-    ps = set([r.name for r in db.execute_query("SELECT name FROM PROPERTY")])
+    ps = set([(r.id, r.name) for r
+              in db.execute_query("SELECT name FROM PROPERTY")])
     if None in ps:
         ps.remove(None)
@@ -47,18 +52,26 @@ def get_parser():
                    "be stored")
     p.add_argument("-c", "--compare", help="directory where the datamodel that"
                    " shall be compared is stored")
+    p.add_argument("-x", "--xml", action="store_true",
+                   help="store xml as well")

     return p


-def store(directory):
+def store(directory, xml=False):
     rts, ps = get_dm()
     os.makedirs(directory, exist_ok=True)
     with open(os.path.join(directory, "recordtypes.txt"), "w") as fi:
-        fi.write(",".join(rts))
+        fi.write(",".join([el[1] for el in rts]))
     with open(os.path.join(directory, "properties.txt"), "w") as fi:
-        fi.write(",".join(ps))
+        fi.write(",".join([el[1] for el in ps]))
+
+    if xml:
+        cont = retrieve_entities_with_ids(
+            [el[0] for el in rts]+[el[0] for el in ps])
+        export(cont, directory)


 def load_dm(directory):
@@ -104,7 +117,7 @@ if __name__ == "__main__":
     args = p.parse_args()

     if args.store:
-        store(args.store)
+        store(args.store, xml=args.xml)

     if args.compare:
         compare(args.compare)
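What the new `--xml` flag adds can also be reproduced interactively; a rough sketch using the same calls as the diff above (it requires a reachable CaosDB instance, and the final print is only for illustration):

```python
import caosdb as db
from caosdb.apiutils import retrieve_entities_with_ids

# collect (id, name) pairs for all record types and properties, as get_dm() now does
rts = {(r.id, r.name) for r in db.execute_query("SELECT name FROM RECORDTYPE")}
ps = {(r.id, r.name) for r in db.execute_query("SELECT name FROM PROPERTY")}

# retrieve the full entities so that they can be written out as XML
cont = retrieve_entities_with_ids([el[0] for el in rts] + [el[0] for el in ps])
print(len(cont), "data model entities retrieved")
```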
@@ -56,6 +56,7 @@ from .datainconsistency import DataInconsistencyError
 from .datamodel_problems import DataModelProblems
 from .guard import RETRIEVE, ProhibitedException
 from .guard import global_guard as guard
+from .serverside.helper import send_mail as main_send_mail
 from .suppressKnown import SuppressKnown

 logger = logging.getLogger(__name__)
@@ -500,7 +501,6 @@ carefully and if the changes are ok, click on the following link:
 """.format(url=caosdb_config["Connection"]["url"],
            filename=filename,
            changes="\n".join(changes))
-        sendmail = caosdb_config["Misc"]["sendmail"]
         try:
             fro = caosdb_config["advancedtools"]["crawler.from_mail"]
             to = caosdb_config["advancedtools"]["crawler.to_mail"]
@@ -510,8 +510,11 @@ carefully and if the changes are ok, click on the following link:
                          "'from_mail' and 'to_mail'.")
             return

-        p = subprocess.Popen([sendmail, "-f", fro, to], stdin=subprocess.PIPE)
-        p.communicate(input=text.encode())
+        main_send_mail(
+            from_addr=fro,
+            to=to,
+            subject="Crawler Update",
+            body=text)

     def push_identifiables_to_CaosDB(self, cfood):
         """
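The serverside helper used here takes sender, recipient, subject and body as plain arguments; a minimal sketch based on the call above (the absolute import path is inferred from the relative import in the diff, and the addresses are placeholders):

```python
from caosadvancedtools.serverside.helper import send_mail

# replaces the manual subprocess call to the local sendmail binary
send_mail(
    from_addr="crawler@example.com",
    to="curator@example.com",
    subject="Crawler Update",
    body="Please review the changes proposed by the crawler.")
```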
@@ -96,12 +96,15 @@ def invert_ids(entities):
     apply_to_ids(entities, lambda x: x*-1)


-def export(rec_id, directory="."):
+def export_related_to(rec_id, directory="."):
     if not isinstance(rec_id, int):
         raise ValueError("rec_id needs to be an integer")

     ent = db.execute_query("FIND {}".format(rec_id), unique=True)
     cont = recursively_collect_related(ent)
+    export(cont, directory=directory)
+
+
+def export(cont, directory="."):
     directory = os.path.abspath(directory)
     dl_dir = os.path.join(directory, "downloads")
@@ -119,6 +122,9 @@ def export(rec_id, directory="."):
             print("Failed download of:", target)
     invert_ids(cont)

+    for el in cont:
+        el.version = None
+
     xml = etree.tounicode(cont.to_xml(
         local_serialization=True), pretty_print=True)
@@ -147,4 +153,4 @@ if __name__ == "__main__":
     parser = defineParser()
     args = parser.parse_args()

-    export(args.id, directory=args.directory)
+    export_related_to(args.id, directory=args.directory)
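After this split there are two entry points: `export_related_to` resolves a record id and collects everything related to it, while `export` writes out an already assembled container; a short sketch (the record id, query and target directories are placeholders):

```python
import caosdb as db
from caosadvancedtools.export_related import export, export_related_to

# starting from an id: collect all related entities, then export them
export_related_to(1234, directory="/tmp/export_by_id")

# starting from an explicitly assembled container
cont = db.execute_query("FIND Record WITH quality_factor")
export(cont, directory="/tmp/export_container")
```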
@@ -316,19 +316,22 @@ class Parser(object):
             self.model[name].description = prop

         elif prop_name == "recommended_properties":
-            self._add_to_recordtype(name, prop, importance=db.RECOMMENDED)
+            self._add_to_recordtype(
+                name, prop, importance=db.RECOMMENDED)

             for n, e in prop.items():
                 self._treat_entity(n, e)

         elif prop_name == "obligatory_properties":
-            self._add_to_recordtype(name, prop, importance=db.OBLIGATORY)
+            self._add_to_recordtype(
+                name, prop, importance=db.OBLIGATORY)

             for n, e in prop.items():
                 self._treat_entity(n, e)

         elif prop_name == "suggested_properties":
-            self._add_to_recordtype(name, prop, importance=db.SUGGESTED)
+            self._add_to_recordtype(
+                name, prop, importance=db.SUGGESTED)

             for n, e in prop.items():
                 self._treat_entity(n, e)
@@ -355,9 +358,13 @@ class Parser(object):
         self.treated.append(name)

     def _check_datatypes(self):
-        """ checks if datatype is valid.
-        datatype of properties is simply initialized with string. Here over
-        properties is iterated and datatype is corrected. """
+        """Checks if the datatype is valid.
+
+        Iterate over all properties in this datamodel and choose a
+        valid datatype from the original datatype string in the yaml
+        file. Raise a ValueError if no datatype can be found.
+        """

         for key, value in self.model.items():
             if isinstance(value, db.Property):
@@ -368,7 +375,8 @@
                 try:
                     value.datatype = db.__getattribute__(value.datatype)
                 except AttributeError:
-                    raise ValueError("Unknown Datatype.")
+                    raise ValueError(
+                        "Property {} has an unknown datatype: {}".format(value.name, value.datatype))

     def _set_recordtypes(self):
         """ properties are defined in first iteration; set remaining as RTs """
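With the more specific error message, a model containing a misspelled datatype now reports which property is affected; a minimal sketch, assuming `parse_model_from_yaml` is the public entry point of this parser (the yaml content is only an example):

```python
from tempfile import NamedTemporaryFile

from caosadvancedtools.models.parser import parse_model_from_yaml

# a tiny model with a typo in the datatype of one property
MODEL = """
Experiment:
  recommended_properties:
    voltage:
      datatype: DOUBBLE
"""

with NamedTemporaryFile("w", suffix=".yml", delete=False) as tmp:
    tmp.write(MODEL)

try:
    parse_model_from_yaml(tmp.name)
except ValueError as err:
    # e.g. "Property voltage has an unknown datatype: DOUBBLE"
    print(err)
```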