diff --git a/src/caosadvancedtools/serverside/examples/example_script.py b/src/caosadvancedtools/serverside/examples/example_script.py
index 7f17c7f048e6e255186b6913bbe19b36bf4a7d23..c57fb68c83f4249d532554d23d58a3d53dd66d81 100755
--- a/src/caosadvancedtools/serverside/examples/example_script.py
+++ b/src/caosadvancedtools/serverside/examples/example_script.py
@@ -27,7 +27,7 @@
 An exemplary script that illustrates how scripts can be used in conjunction
 with the generic_analysis module.
 
-Data model:
+The data model needed for this script is:
 
 Analysis:
-    sources: REFEERENCE
+    sources: REFERENCE
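+
+Such a model could be created, e.g., like this (a sketch; the full model may
+contain further properties that are omitted here):
+
+    import caosdb as db
+    db.Property(name="sources", datatype=db.REFERENCE).insert()
+    db.RecordType(name="Analysis").add_property(name="sources").insert()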
@@ -48,13 +48,17 @@ from datetime import datetime
 import caosdb as db
 import matplotlib.pyplot as plt
 import numpy as np
-from caosadvancedtools.cfood import assure_property_is, get_property
+from caosadvancedtools.cfood import assure_property_is
+from caosadvancedtools.crawler import apply_list_of_updates
 from caosadvancedtools.guard import INSERT, UPDATE
 from caosadvancedtools.guard import global_guard as guard
 from caosadvancedtools.serverside.helper import send_mail as main_send_mail
 
+# Logging should be done like this so that the calling script can direct
+# the output to the appropriate handlers.
 logger = logging.getLogger(__name__)
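+# A calling script could, e.g., direct this output to stdout (a sketch using
+# only the standard library):
+#
+#     import sys
+#     logging.getLogger("caosadvancedtools").addHandler(
+#         logging.StreamHandler(sys.stdout))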
 
+# allow updates of existing entities
 guard.set_level(level=UPDATE)
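+# (With guard.set_level(level=INSERT), only insertions would be permitted and
+# guarded updates of existing entities would be rejected.)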
 
 
@@ -65,6 +69,7 @@ def send_mail(changes, receipient):
     Parameters:
     -----------
     changes: The CaosDB entities in the version after the update.
+    receipient: The email address of the person who shall receive the mail.
     """
 
     caosdb_config = db.configuration.get_config()
@@ -90,10 +95,12 @@ The following changes where done automatically.
 
 def main(args):
 
+    # The auth_token is provided by the server-side scripting API. Use this
+    # token for authentication when creating a new connection.
     if hasattr(args, "auth_token") and args.auth_token:
         db.configure_connection(auth_token=args.auth_token)
+        logger.debug("Established connection.")
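+    # Without an auth_token, the default connection settings from the local
+    # pycaosdb configuration would be used instead.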
 
-    # TODO (maybe) can these checks be replaced by a more declaritive appoach?
     try:
         dataAnalysisRecord = db.Record(id=args.entityid).retrieve()
     except db.TransactionError:
@@ -111,6 +118,8 @@ def main(args):
 
         raise RuntimeError("sources Refenrence must exist.")
 
+    logger.debug("Found required data.")
+
     # ####### this core might be replaced by a call to another script ####### #
     # Download the data
     source_val = dataAnalysisRecord.get_property("sources").value
@@ -119,6 +128,7 @@ def main(args):
             if isinstance(source_val, list)
             else source_val)).retrieve()
     npfile = npobj.download()
+    logger.debug("Downloaded data.")
     data = np.load(npfile)
 
     # Plot data
@@ -134,10 +144,9 @@ def main(args):
                   path="/Analysis/results/"+str(datetime.now())+"/"+filename)
     fig.insert()
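+    # (The timestamp in the target path keeps result files of different runs
+    # apart.)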
 
-    # Add the result to the analysis Record
-    # An update should only be done if necessary: assure_property_is should be
-    # used instead of direct calls to 'update'.
-
+    # Add the mean value to the analysis Record.
+    # If such a property already exists, it is only changed if necessary; the
+    # old value then persists in LinkAhead's versioning.
     to_be_updated = db.Container()
     assure_property_is(
         dataAnalysisRecord,
@@ -145,8 +154,11 @@ def main(args):
         mean,
         to_be_updated=to_be_updated
     )
-    # TODO (maybe) this is not really meaningful since an uploaded file will
-    # always be different.... Compare checksums of files?
+
+    # Add the file with the plot to the analysis Record.
+    # If a file was already referenced, the new one will be referenced instead.
+    # The old file is kept and remains referenced by an old version of the
+    # analysis Record.
     assure_property_is(
         dataAnalysisRecord,
         "results",
@@ -156,24 +168,27 @@ def main(args):
 
     if len(to_be_updated) > 0:
-        print(to_be_updated)
+        logger.debug(to_be_updated)
-        to_be_updated.update()
+        apply_list_of_updates(to_be_updated, update_flags={})
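+        # (update_flags can be used to pass additional flags along with the
+        # update; none are needed here.)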
+        logger.debug("Update sucessful.")
+        logger.info("The following Entities were changed:\n{}.".format(
+            [el.id for el in to_be_updated])
+        )
+
+        # Send mails to the Persons who are referenced by the analysis Record.
         people = db.execute_query("FIND RECORD Person WHICH IS REFERENCED BY "
                                   "{}".format(dataAnalysisRecord.id))
         for person in people:
             if person.get_property("Email") is not None:
                 send_mail([str(el) for el in to_be_updated],
                           receipient=person.get_property("Email").value)
-
-    # TODO (must) what should be done with the old file? Removed if not referenced?
-
-    # TODO (maybe) inform about updates (reuse stuff from crawler.py?)
-    # TODO (must) sketch meaningful logging
+        logger.debug("Mails send.")
 
 
 def parse_args():
     parser = argparse.ArgumentParser(description=__doc__,
                                      formatter_class=RawTextHelpFormatter)
-    parser.add_argument("--auth-token")
+    parser.add_argument("--auth-token",
+                        help="Token provided by the server for authentication")
     parser.add_argument("entityid",
                         help="The ID of the DataAnalysis Record.", type=int)