diff --git a/src/newcrawler/crawl.py b/src/newcrawler/crawl.py
index 1cf6eba149dcd537e3eaf3a392f2dcb681979dab..49b72d46c1c6f90f4331583ec37cb2f1a208141f 100644
--- a/src/newcrawler/crawl.py
+++ b/src/newcrawler/crawl.py
@@ -38,7 +38,7 @@ from .converters import Converter, DirectoryConverter
 from .identifiable_adapters import IdentifiableAdapter, LocalStorageIdentifiableAdapter
 from collections import defaultdict
 from typing import Union, Any, Optional
-from caosdb.apiutils import compare_entities
+from caosdb.apiutils import compare_entities, merge_entities
 from copy import deepcopy
 from jsonschema import validate
 
@@ -337,12 +337,8 @@ class Crawler(object):
         """
         Copy all attributes from one entity to another entity.
         """
-        # add missing parents:
-        for parent in fro.parents:
-            if to.get_parent(parent.name) is None:
-                to.add_parent(parent)
-        # TODO: unfinished
-        raise NotImplementedError()
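+        # Merge parents and properties of fro into to using caosdb's merge_entities.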
+        merge_entities(to, fro)
 
     def split_into_inserts_and_updates(self, ent_list: list[db.Entity]):
         if self.identifiableAdapter is None:
@@ -377,8 +373,7 @@ class Crawler(object):
                     # information
                     # Update a (local) identified record that will be inserted
                     newrecord = self.get_identified_record_from_local_cache(record)
-                    # breakpoint()
-                    # self.copy_attributes(fro=record, to=newrecord)
+                    self.copy_attributes(fro=record, to=newrecord)
                     # Bend references to the other object
                     # TODO refactor this
                     for el in flat + to_be_inserted + to_be_updated:
diff --git a/unittests/test_tool.py b/unittests/test_tool.py
index ccfcee1cfcd1736a7d1dd17c62a2d3a52d89b648..06328deb81a195739a24ccd89581ca955bb03b7f 100755
--- a/unittests/test_tool.py
+++ b/unittests/test_tool.py
@@ -173,7 +173,6 @@ def test_ambigious_records(crawler, ident):
 
 
 def test_crawler_update_list(crawler, ident):
-    crawler.copy_attributes = Mock()
     # If the following assertions fail, that is a hint that the test file records.xml has changed
     # and this needs to be updated:
     assert len(ident.get_records()) == 18
@@ -334,8 +333,6 @@ def mock_retrieve(crawler):
         else:
             return None
 
-    crawler.copy_attributes = Mock()
-
     # a record that is found remotely and should be added to the update list, and one that is not
     # found and should be added to the insert list
     remote_known = {"A": db.Record(id=1111, name="A")}
@@ -432,7 +429,6 @@ def test_split_into_inserts_and_updates_with_complex(mock_retrieve):
     # TODO write test where the unresolved entity is not part of the identifiable
 
 
-@pytest.mark.xfail
 def test_split_into_inserts_and_updates_with_copy_attr(mock_retrieve):
     crawler = mock_retrieve
     # assume identifiable is only the name
@@ -443,7 +439,6 @@ def test_split_into_inserts_and_updates_with_copy_attr(mock_retrieve):
     entlist = [a, b]
     insert, update = crawler.split_into_inserts_and_updates(entlist)
 
-    # expected TODO
     assert update[0].get_property("bar").value == 2
     assert update[0].get_property("foo").value == 1