Commit 7328d10c authored by Henrik tom Wörden

WIP: Update: example script

parent f38b5bd4
Part of 2 merge requests: !39 (Release 0.4.0), !20 (created draft for generic analysis method)
@@ -27,7 +27,7 @@
An exemplary script that illustrates how scripts can be used in conjunction
with the generic_analysis module.

The data model needed for this script is:

Analysis:
    sources: REFERENCE
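As a side note, a data model like the one sketched in this docstring could be created with the CaosDB Python client roughly as follows. This is only an illustrative sketch: apart from "sources" and "results", which appear in the script, the property list is cut off in this excerpt and the datatypes are assumptions.

# Illustrative sketch (not part of this commit): creating the "Analysis"
# RecordType used by the example script. Only "sources" and "results" are
# taken from the script itself; the datatypes are assumptions.
import caosdb as db

analysis = db.RecordType(name="Analysis")
analysis.add_property(name="sources", datatype=db.REFERENCE)
analysis.add_property(name="results", datatype=db.LIST(db.REFERENCE))
analysis.insert()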
@@ -48,13 +48,17 @@ from datetime import datetime
import caosdb as db
import matplotlib.pyplot as plt
import numpy as np
from caosadvancedtools.cfood import assure_property_is
from caosadvancedtools.crawler import apply_list_of_updates
from caosadvancedtools.guard import INSERT, UPDATE
from caosadvancedtools.guard import global_guard as guard
from caosadvancedtools.serverside.helper import send_mail as main_send_mail
# logging should be done like this in order to allow the caller script to
# direct the output.
logger = logging.getLogger(__name__)
# allow updates of existing entities
guard.set_level(level=UPDATE)
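The module-level logger above is deliberately left unconfigured so that a calling script can decide where the output goes. A minimal sketch of such a caller is shown here; the handler, format and logger name are assumptions, not part of the commit.

# Sketch (not part of this commit): a caller directing this script's log output.
import logging
import sys

handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))

# When the example is run as a script its logger is named "__main__";
# when imported, use its module name instead (assumption about the setup).
script_logger = logging.getLogger("__main__")
script_logger.addHandler(handler)
script_logger.setLevel(logging.DEBUG)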
@@ -65,6 +69,7 @@ def send_mail(changes, receipient):
    Parameters:
    -----------
    changes: The CaosDB entities in the version after the update.
    receipient: The person who shall receive the mail.
    """
    caosdb_config = db.configuration.get_config()
@@ -90,10 +95,12 @@ The following changes were done automatically.
def main(args):
    # auth_token is provided by the server side scripting API
    # use this token for authentication when creating a new connection
    if hasattr(args, "auth_token") and args.auth_token:
        db.configure_connection(auth_token=args.auth_token)
        logger.debug("Established connection")
    # TODO (maybe) can these checks be replaced by a more declarative approach?
    try:
        dataAnalysisRecord = db.Record(id=args.entityid).retrieve()
    except db.TransactionError:
@@ -111,6 +118,8 @@ def main(args):
        raise RuntimeError("sources Reference must exist.")

    logger.debug("Found required data.")

    # ####### this core might be replaced by a call to another script ####### #
    # Download the data
    source_val = dataAnalysisRecord.get_property("sources").value
@@ -119,6 +128,7 @@
                        if isinstance(source_val, list)
                        else source_val)).retrieve()
    npfile = npobj.download()
    logger.debug("Downloaded data.")
    data = np.load(npfile)

    # Plot data
@@ -134,10 +144,9 @@ def main(args):
path="/Analysis/results/"+str(datetime.now())+"/"+filename) path="/Analysis/results/"+str(datetime.now())+"/"+filename)
fig.insert() fig.insert()
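The commented banner above marks the analysis core (download, computation, plotting) as a candidate for replacement by a separate script. One way to do that, sketched here under the assumption of a hypothetical compute_mean.py command-line tool, is to shell out and keep only the bookkeeping in this script:

# Sketch (not part of this commit): delegating the analysis core to an external
# program. "compute_mean.py" is hypothetical; it is assumed to read the numpy
# file, write a plot to the given path and print the mean value to stdout.
import subprocess


def run_external_analysis(npfile_path, plot_path):
    result = subprocess.run(
        ["python3", "compute_mean.py", npfile_path, "--plot", plot_path],
        capture_output=True, text=True, check=True)
    return float(result.stdout.strip())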

    # Add the mean value to the analysis Record
    # If such a property existed before, it is changed if necessary. The old
    # value will persist in the versioning of LinkAhead
    to_be_updated = db.Container()
    assure_property_is(
        dataAnalysisRecord,
@@ -145,8 +154,11 @@
        mean,
        to_be_updated=to_be_updated
    )

    # Add the file with the plot to the analysis Record
    # If a file was already referenced, the new one will be referenced instead.
    # The old file is being kept and is still referenced in an old version of
    # the analysis Record.
    assure_property_is(
        dataAnalysisRecord,
        "results",
@@ -156,24 +168,27 @@ def main(args):
    if len(to_be_updated) > 0:
        print(to_be_updated)
        apply_list_of_updates(to_be_updated, update_flags={})
    logger.debug("Update successful.")
    logger.info("The following Entities were changed:\n{}.".format(
        [el.id for el in to_be_updated])
    )

    # Send mails to people that are referenced.
    people = db.execute_query("FIND RECORD Person WHICH IS REFERENCED BY "
                              "{}".format(dataAnalysisRecord.id))
    for person in people:
        if person.get_property("Email") is not None:
            send_mail([str(el) for el in to_be_updated],
                      receipient=person.get_property("Email").value)
    logger.debug("Mails sent.")
# TODO (must) what should be done with the old file? Removed if not referenced?
# TODO (maybe) inform about updates (reuse stuff from crawler.py?)
# TODO (must) sketch meaningful logging
def parse_args():
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=RawTextHelpFormatter)
    parser.add_argument("--auth-token",
                        help="Token provided by the server for authentication")
    parser.add_argument("entityid",
                        help="The ID of the DataAnalysis Record.", type=int)
...