diff --git a/src/caosadvancedtools/serverside/examples/example_script.py b/src/caosadvancedtools/serverside/examples/example_script.py index de68241b41555afde9f315ea13923cd092dad679..7f17c7f048e6e255186b6913bbe19b36bf4a7d23 100755 --- a/src/caosadvancedtools/serverside/examples/example_script.py +++ b/src/caosadvancedtools/serverside/examples/example_script.py @@ -26,6 +26,17 @@ """ An exemplary script that illustrates how scripts can be used in conjunction with the generic_analysis module. + +Data model: + +Analysis: + sources: REFERENCE + scripts: FILE + results: REFERENCE + mean_value: DOUBLE + +Person: + Email: TEXT """ import argparse @@ -37,7 +48,7 @@ from datetime import datetime import caosdb as db import matplotlib.pyplot as plt import numpy as np -from caosadvancedtools.cfood import assure_property_is +from caosadvancedtools.cfood import assure_property_is, get_property from caosadvancedtools.guard import INSERT, UPDATE from caosadvancedtools.guard import global_guard as guard from caosadvancedtools.serverside.helper import send_mail as main_send_mail @@ -47,7 +58,7 @@ logger = logging.getLogger(__name__) guard.set_level(level=UPDATE) -def send_mail(self, changes, receipient): +def send_mail(changes, receipient): """ calls sendmail in order to send a mail to the curator about pending changes @@ -78,11 +89,15 @@ The following changes where done automatically. def main(args): + + if hasattr(args, "auth_token") and args.auth_token: + db.configure_connection(auth_token=args.auth_token) + # TODO (maybe) can these checks be replaced by a more declaritive appoach? try: dataAnalysisRecord = db.Record(id=args.entityid).retrieve() except db.TransactionError: - logger.error("Cannot retrieve dataAnalysisRecord with id ={}".format( + logger.error("Cannot retrieve Record with id ={}".format( args.entityid )) @@ -123,40 +138,42 @@ def main(args): # An update should only be done if necessary: assure_property_is should be # used instead of direct calls to 'update'. 
- to_be_updated = [] + to_be_updated = db.Container() assure_property_is( dataAnalysisRecord, "mean_value", mean, to_be_updated=to_be_updated ) - # TODO (maybe) this is not really meaningful since an uploaded file will always - # be different.... Compare checksums of files? + # TODO (maybe) this is not really meaningful since an uploaded file will + # always be different.... Compare checksums of files? assure_property_is( dataAnalysisRecord, "results", - fig.id, + [fig.id], to_be_updated=to_be_updated ) if len(to_be_updated) > 0: - people = db.execute_query("FIND RECORD Person WHICH IS REFERENCEED BY " + print(to_be_updated) + to_be_updated.update() + people = db.execute_query("FIND RECORD Person WHICH IS REFERENCED BY " "{}".format(dataAnalysisRecord.id)) for person in people: if person.get_property("Email") is not None: - send_mail(to_be_updated, receipient=person.get_property( - "Email")) + send_mail([str(el) for el in to_be_updated], + receipient=person.get_property("Email").value) # TODO (must) what should be done with the old file? Removed if not referenced? # TODO (maybe) inform about updates (reuse stuff from crawler.py?) # TODO (must) sketch meaningful logging - # TODO (must) how to send an email? def parse_args(): parser = argparse.ArgumentParser(description=__doc__, formatter_class=RawTextHelpFormatter) + parser.add_argument("--auth-token") parser.add_argument("entityid", help="The ID of the DataAnalysis Record.", type=int)