diff --git a/src/caoscrawler/crawl.py b/src/caoscrawler/crawl.py
index 6911e2bf0ba90e85d6bfdbd70effdcda01e79426..422ea9639c4bc496f46fc668ace6cc9754ece305 100644
--- a/src/caoscrawler/crawl.py
+++ b/src/caoscrawler/crawl.py
@@ -49,6 +49,7 @@ from typing import Any, Optional, Type, Union
 
 import caosdb as db
 
+from caosadvancedtools.utils import create_entity_link
 from caosadvancedtools.cache import UpdateCache, Cache
 from caosadvancedtools.crawler import Crawler as OldCrawler
 from caosdb.apiutils import (compare_entities, EntityMergeConflictError,
@@ -1022,20 +1023,20 @@ class Crawler(object):
         # to the existing ones
         to_be_updated = self.remove_unnecessary_updates(to_be_updated, identified_records)
 
-        logger.info(f"Going to insert {len(to_be_inserted)} Entities:\n"
-                    + self.create_entity_summary(to_be_inserted))
-        logger.info(f"Going to update {len(to_be_inserted)} Entities:\n"
-                    + self.create_entity_summary(to_be_updated))
+        logger.info(f"Going to insert {len(to_be_inserted)} Entities and update "
+                    f"{len(to_be_updated)} Entities.")
         if commit_changes:
             self.execute_parent_updates_in_list(to_be_updated, securityMode=self.securityMode,
                                                 run_id=self.run_id, unique_names=unique_names)
             logger.info(f"Added parent RecordTypes where necessary.")
             self.execute_inserts_in_list(
                 to_be_inserted, self.securityMode, self.run_id, unique_names=unique_names)
-            logger.info(f"Executed inserts.")
+            logger.info(f"Executed inserts:\n"
+                        + self.create_entity_summary(to_be_inserted))
             self.execute_updates_in_list(
                 to_be_updated, self.securityMode, self.run_id, unique_names=unique_names)
-            logger.info(f"Executed updates.")
+            logger.info(f"Executed updates:\n"
+                        + self.create_entity_summary(to_be_updated))
 
         update_cache = UpdateCache()
         pending_inserts = update_cache.get_inserts(self.run_id)
@@ -1059,7 +1060,7 @@ class Crawler(object):
                 if pp.name not in parents:
                     parents[pp.name] = []
                 else:
-                    parents[pp.name].append(el.id)
+                    parents[pp.name].append(el)
         output = ""
         for key, value in parents.items():
             output += f"{key}:\n"