diff --git a/integrationtests/test.sh b/integrationtests/test.sh
index 36730cc948d308659f01f6153f86a917ab1909d0..ab6ea545a34c5288d5eb5119ab8c7e9b11b5c445 100755
--- a/integrationtests/test.sh
+++ b/integrationtests/test.sh
@@ -58,6 +58,8 @@ echo "./crawl.py -a $RUN_ID /"
 set +e
 if grep "There where unauthorized changes" $OUT
 then 
+    echo "There were still unauthorized changes which should not have happend!"
+    echo "Test FAILED!"
     exit 1
 fi
 set -e
diff --git a/integrationtests/test_im_und_export.py b/integrationtests/test_im_und_export.py
index cd6782c2d83c4f6bb2b50e8dee04674e4fc45d0c..3940ccce7ecb0d4a6c691078896ad46c55ce995d 100644
--- a/integrationtests/test_im_und_export.py
+++ b/integrationtests/test_im_und_export.py
@@ -14,7 +14,9 @@ if __name__ == "__main__":
     # delete everything
     print("Clearing database")
     recs = db.execute_query("FIND ENTITY entity with id>99")
-    recs.delete()
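+    # delete() on an empty container presumably raises, hence the emptiness guard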
+    if len(recs) > 0:
+        recs.delete()
     assert 0 == len(db.execute_query("FIND File which is stored at "
                                      "**/poster.pdf"))
     print("Importing stored elements")
diff --git a/src/caosadvancedtools/cfood.py b/src/caosadvancedtools/cfood.py
index 4a9f955a17fc429deb6cdd10c3645700e579b4df..c0da4f0156dc2af48a4ba80b4d0af69c62cd5c3e 100644
--- a/src/caosadvancedtools/cfood.py
+++ b/src/caosadvancedtools/cfood.py
@@ -807,7 +807,9 @@ class RowCFood(AbstractCFood):
     def update_identifiables(self):
         rec = self.identifiables[0]
 
-        for key, value in self.item.iteritems():
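+        # items() replaces iteritems(), which pandas deprecated in 1.5 and
+        # removed in 2.0 (assuming self.item is a pandas Series here)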
+        for key, value in self.item.items():
             if key in self.unique_cols:
                 continue
             assure_property_is(rec, key,
diff --git a/src/caosadvancedtools/crawler.py b/src/caosadvancedtools/crawler.py
index 41c8871f686fde7bae7d36c29e97619f97f0ce38..cbf5e98f82fc50afac79f2938a4fdccfd376b1ad 100644
--- a/src/caosadvancedtools/crawler.py
+++ b/src/caosadvancedtools/crawler.py
@@ -212,7 +212,7 @@ class Crawler(object):
             new_cont.insert(unique=False)
             logger.info("Successfully inserted {} records!".format(len(new_cont)))
             all_inserts += len(new_cont)
-        logger.info("Finished with authorized updates.")
+        logger.info("Finished with authorized insertes.")
 
         changes = cache.get_updates(run_id)
 
@@ -220,7 +220,8 @@
             new_cont = db.Container()
             new_cont = new_cont.from_xml(new)
             ids = []
-            tmp = []
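+            # a db.Container (not a plain list) keeps container-level operations available downstream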
+            tmp = db.Container()
             update_incomplete = False
             # remove duplicate entities
             for el in new_cont:
@@ -237,7 +237,9 @@
                 for ent in new_cont:
                     remote_ent = db.Entity(id=ent.id).retrieve()
                     if ent.version == remote_ent.version:
-                        valids.append(remote_ent)
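+                        # versions match: keep the local entity so its staged
+                        # changes are applied, not the unchanged remote copy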
+                        valids.append(ent)
                     else:
                         update_incomplete = True
                         nonvalids.append(remote_ent)