diff --git a/src/caosadvancedtools/crawler.py b/src/caosadvancedtools/crawler.py
index fa6fa5ad6b82d823599176d118a83cde422099d7..f4aefb7d883ab9ddb6a009ad3dbba52ca402b393 100644
--- a/src/caosadvancedtools/crawler.py
+++ b/src/caosadvancedtools/crawler.py
@@ -118,7 +118,8 @@ class Crawler(object):
         """ generates items to be crawled with an index"""
         yield 0, None
 
-    def update_authorized_changes(self, run_id):
+    @staticmethod
+    def update_authorized_changes(run_id):
         """
         execute the pending updates of a specific run id.
 
@@ -128,7 +129,8 @@ class Crawler(object):
         -----------
         run_id: the id of the crawler run
         """
-        changes = self.update_cache.get_updates(run_id)
+        cache = UpdateCache()
+        changes = cache.get_updates(run_id)
 
         for _, _, old, new, _ in changes:
             current = db.Container()
@@ -147,6 +149,9 @@ class Crawler(object):
                 continue
 
             new_cont.update(unique=False)
+            logger.info("Successfully updated {} records!".format(
+                len(new_cont)))
+        logger.info("Finished with authorized updates.")
 
     def collect_cfoods(self):
         """
diff --git a/src/caosadvancedtools/loadFiles.py b/src/caosadvancedtools/loadFiles.py
index 26907f766c1f33e7ad57abd464d29e62c69c4ecd..445374b3b3bddf94eefd7952e190bf52155765a8 100755
--- a/src/caosadvancedtools/loadFiles.py
+++ b/src/caosadvancedtools/loadFiles.py
@@ -24,12 +24,14 @@
 #
 
 import argparse
+import logging
 import math
 import sys
 from argparse import ArgumentParser
 
 import caosdb as db
 
+logger = logging.getLogger(__name__)
 timeout_fallback = 20
 
 
@@ -37,8 +39,8 @@ def convert_size(size):
     if (size == 0):
         return '0B'
     size_name = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
-    i = int(math.floor(math.log(size, 1024)))
-    p = math.pow(1024, i)
+    i = int(math.floor(math.log(size, 1000)))
+    p = math.pow(1000, i)
     s = round(size / p, 2)
 
     return '%s %s' % (s, size_name[i])
@@ -47,7 +49,7 @@ def convert_size(size):
 def loadpath(path, include, exclude, prefix, dryrun, forceAllowSymlinks):
 
     if dryrun:
-        print("DRYRUN")
+        logger.info("Performing a dryrun!")
         files = db.Container().retrieve(
             unique=False,
             raise_exception_on_error=True,
@@ -73,8 +75,8 @@ def loadpath(path, include, exclude, prefix, dryrun, forceAllowSymlinks):
     for f in files:
         totalsize += f.size
 
-    print("\n\nTOTAL " + str(len(files)) +
-          " NEW files (" + convert_size(totalsize) + ")")
+    logger.info("Made in total {} new files with a combined size of {} "
+                "accessible.".format(len(files), convert_size(totalsize)))
 
     return