diff --git a/src/caoscrawler/crawl.py b/src/caoscrawler/crawl.py
index ed8493fd2429ed42a4277c0bfca7644e93b08da6..ce4a617853b629295079ca8ac76bfa0eb6e93635 100644
--- a/src/caoscrawler/crawl.py
+++ b/src/caoscrawler/crawl.py
@@ -63,8 +63,7 @@ from .converters import Converter, ConverterValidationError
 from .debug_tree import DebugTree
 from .identifiable import Identifiable
 from .identifiable_adapters import (CaosDBIdentifiableAdapter,
-                                    IdentifiableAdapter,
-                                    LocalStorageIdentifiableAdapter)
+                                    IdentifiableAdapter)
 from .logging import configure_server_side_logging
 from .macros import defmacro_constructor, macro_constructor
 from .scanner import (create_converter_registry, initialize_converters,
@@ -221,7 +220,7 @@ class Crawler(object):
         # TODO does it make sense to have this as member variable(run_id)?
         self.generate_run_id()
 
-        self.identifiableAdapter: IdentifiableAdapter = LocalStorageIdentifiableAdapter()
+        self.identifiableAdapter: IdentifiableAdapter = CaosDBIdentifiableAdapter()
         if identifiableAdapter is not None:
             self.identifiableAdapter = identifiableAdapter
 
@@ -424,7 +424,7 @@ class Crawler(object):
                 f"Could not finish split_into_inserts_and_updates. Circular dependency: "
                 f"{circle is not None}")
 
-        return st.create_record_lists()
+        return st.export_record_lists()
 
     def replace_entities_with_ids(self, rec: db.Record):
         for el in rec.properties:
diff --git a/src/caoscrawler/identifiable.py b/src/caoscrawler/identifiable.py
index 02eba44a9f30b854f2313f976785f3fa17ec28c7..91eb45ce787128c55ab361c9c54821eda115cfd5 100644
--- a/src/caoscrawler/identifiable.py
+++ b/src/caoscrawler/identifiable.py
@@ -29,6 +29,8 @@ from typing import Union
 
 import linkahead as db
 
+from .sync_node import SyncNode
+
 logger = logging.getLogger(__name__)
 
 
@@ -90,9 +92,9 @@ class Identifiable():
 
         if value is None:
             return "None"
-        elif isinstance(value, db.File):
+        elif isinstance(value, db.File):  # TODO
             return str(value.path)
-        elif isinstance(value, db.Entity):
+        elif isinstance(value, SyncNode):
             if value.id is not None:
                 return str(value.id)
             else:
@@ -141,7 +143,7 @@ class Identifiable():
             return False
 
     def __repr__(self):
-        pstring = json.dumps(self.properties)
+        pstring = json.dumps({k: str(v) for k, v in self.properties.items()})
         return (f"{self.__class__.__name__} for RT {self.record_type}: id={self.record_id}; "
                 f"name={self.name}\n\tpath={self.path}\n"
                 f"\tproperties:\n{pstring}\n"
diff --git a/src/caoscrawler/identifiable_adapters.py b/src/caoscrawler/identifiable_adapters.py
index cfe27c3ffe82173af0283dcde7d4139dbb4571f5..7ecb1c55d8bb3d3a162811da474553e2ecb90a27 100644
--- a/src/caoscrawler/identifiable_adapters.py
+++ b/src/caoscrawler/identifiable_adapters.py
@@ -38,6 +38,7 @@ from linkahead.cached import cached_get_entity_by, cached_query
 from linkahead.utils.escape import escape_squoted_text
 
 from .identifiable import Identifiable
+from .sync_node import SyncNode
 from .utils import has_parent
 
 logger = logging.getLogger(__name__)
@@ -262,12 +263,6 @@ startswith: bool, optional
             # fill the values:
             for prop in registered_identifiable.properties:
                 if prop.name == "name":
-                    name_options = [f.name for f in se.fragments if f.name is not None]
-                    if len(name_options) == 0:
-                        raise RuntimeError("name is missing!")
-                    if not all([f == name_options[0] for f in name_options]):
-                        raise RuntimeError("differing names in fragments")
-                    name = name_options[0]
                     continue
                 # problem: what happens with multi properties?
                 # case A: in the registered identifiable
@@ -284,15 +279,14 @@ startswith: bool, optional
                         )
                     continue
 
-                options = [f.get_property(prop.name).value for f in se.fragments
-                           if f.get_property(prop.name) is not None]
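+                # A node may carry several values for the same property name (e.g. collected
+                # from different crawled fragments); gather all of them and check below that
+                # they agree.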
+                options = [p.value for p in se.properties if p.name == prop.name]
                 if len(options) == 0:
                     raise NotImplementedError(
                         f"The following record is missing an identifying property:\n"
-                        f"RECORD\n{se.fragments[0]}\nIdentifying PROPERTY\n{prop.name}"
+                        f"RECORD\n{se}\nIdentifying PROPERTY\n{prop.name}"
                     )
                 for ii, el in enumerate(options):
-                    if isinstance(el, db.Entity):
+                    if isinstance(el, SyncNode):
                         options[ii] = el.id
                         if el.id is None:
                             raise RuntimeError("Reference to unchecked in identifiable:\n"
@@ -300,7 +294,7 @@ startswith: bool, optional
                     else:
                         options[ii] = el
                 if not all([f == options[0] for f in options]):
-                    raise RuntimeError("differing prop values in fragments")
+                    raise RuntimeError(f"Differing values for identifying property '{prop.name}'")
 
                 identifiable_props[prop.name] = options[0]
                 property_name_list_A.append(prop.name)
@@ -322,7 +316,7 @@ startswith: bool, optional
                 path=se.path,
                 record_type=(registered_identifiable.parents[0].name
                              if registered_identifiable else None),
-                name=name,
+                name=se.name,
                 properties=identifiable_props,
                 backrefs=[e.id for e in identifiable_backrefs]
             )
diff --git a/src/caoscrawler/sync_graph.py b/src/caoscrawler/sync_graph.py
index dce6e05375e70443175272bb6c01098d6edebdd7..64058d374c8443a531176adf71d314d1974b45bc 100644
--- a/src/caoscrawler/sync_graph.py
+++ b/src/caoscrawler/sync_graph.py
@@ -26,7 +26,6 @@ A data model class for the semantic data that shall be created by synchronizatio
 from __future__ import annotations
 
 from typing import Any, Dict, List, Optional, Union
-from uuid import uuid4 as uuid
 
 import linkahead as db
 from linkahead.apiutils import (EntityMergeConflictError, compare_entities,
@@ -35,81 +34,19 @@ from linkahead.cached import cache_clear, cached_get_entity_by
 from linkahead.exceptions import EmptyUniqueQueryError
 
 from .identifiable_adapters import IdentifiableAdapter
-
-
-class SyncNode():
-    """ represents the information related to an Entity as it shall be created in LinkAhead
-
-    The following information is taken from db.Entity object during initialization or when the
-    object is updated using `update(entity)`:
-    - id
-    - role
-    - parents
-    - path
-    - name
-    - description
-    - properties
-
-    Typically, this class is used in the following way:
-    1. A SyncNode is initialized with a db.Entity object
-    2. The SyncNode object is possibly updated one or more times with further db.Entity objects
-    3. A db.Entity object is created (`export_entity`) that contains the combined information of
-       the previous db.Entity objects.
-    """
-
-    def __init__(self, entity: db.Entity, registered_identifiable: Optional[db.RecordType] =
-                 None) -> None:
-        self.id = entity.id
-        self.role = entity.role
-        self.parents = entity.parents
-        self.path = entity.path
-        self.name = entity.name
-        self.description = entity.description
-        self.properties = list(entity.properties)
-        self.uuid = uuid()
-        self.identifiable = None
-        self.registered_identifiable = registered_identifiable
-
-    def update(self, other: SyncNode) -> None:
-        if other.identifiable is not None and self.identifiable is not None:
-            assert (other.identifiable.get_representation() ==
-                    self.identifiable.get_representation())
-        for attr in ["id", "path", "role", "path", "name", "description"]:
-            if other.__getattribute__(attr) is not None:
-                if self.__getattribute__(attr) is None:
-                    self.__setattr__(attr, other.__getattribute__(attr))
-                else:
-                    assert self.__getattribute__(attr) == other.__getattribute__(attr)
-        for p in other.parents:
-            if p not in self.parents:
-                self.parents.append(p)
-        for p in other.properties:
-            if p not in self.properties:
-                self.properties.append(p)
-
-    def export_entity(self) -> db.Entity:
-        ent = None
-        if self.role == "Record":
-            ent = db.Record()
-        elif self.role == "File":
-            ent = db.File()
-        else:
-            raise RuntimeError("Invalid role")
-        for attr in ["id", "path", "role", "path", "name", "description"]:
-            ent.__setattr__(attr, self.__getattribute__(attr))
-        for p in self.parents:
-            ent.add_parent(p)
-        for p in self.properties:
-            if ent.get_property(p) is not None:
-                if ent.get_property(p).value != p.value:
-                    raise Exception()
-            else:
-                ent.add_property(p)
-        return ent
+from .sync_node import SyncNode
 
 
 class SyncGraph():
-    """ models the target structure of Entities as it shall be created by the Crawler
+    """ combines nodes in the graph based on their identity in order to create a graph of objects
+    that can either be inserted or update in(to) the remote server.
+
+    When additional information is added to a node of the graph, e.g. via `set_id_of_node`, then
+    the graph is updated accordingly:
+    - if this information implies that the node is equivalent to another node (e.g. has same ID),
+      then they are merged
+    - if knowing that one node does not exist in the remote server, then this might imply that some
+      other node also does not exist if its identity relies on the latter.
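+
+    A rough usage sketch (names are illustrative; `Crawler.split_into_inserts_and_updates`
+    does essentially this in a loop and additionally handles circular dependencies)::
+
+        graph = SyncGraph(crawled_entities, identifiable_adapter)
+        for node in list(graph.unchecked):
+            if node not in graph.unchecked or node.identifiable is None:
+                continue  # already checked via propagation or not (yet) identifiable
+            remote = identifiable_adapter.retrieve_identified_record_for_identifiable(
+                node.identifiable)
+            graph.set_id_of_node(node, remote.id if remote is not None else None)
+        missing, existing = graph.export_record_lists()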
 
     The target entities are composed using the information of the entity fragments (db.Entity
     objects) of SemanticEntities. This is information like name, parents and properties and,
@@ -151,30 +88,74 @@ class SyncGraph():
         ) = self._create_reference_mapping(self.nodes)
 
         self._mark_entities_with_path_or_id()
-
-    def set_id_of_node(self, se: SyncNode, node_id: Optional[str]):
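+        # Compute an identifiable for every node for which this is already possible; nodes
+        # whose identity relies on still unchecked references are retried later.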
+        for node in self.nodes:
+            try:
+                identifiable = self.identifiableAdapter.get_identifiable(
+                    node, self.backward_id_referenced_by[node.uuid])
+                self.set_identifiable_of_node(node, identifiable)
+            except Exception:
+                # The identifiable cannot be determined yet (e.g. identifying information is
+                # still missing or relies on unchecked references); it is retried later.
+                pass
+
+    def set_id_of_node(self, node: SyncNode, node_id: Optional[int] = None):
         """sets the ID attribute of the given SyncNode. If node_id is None, a negative Id will be
-        given indicating that the entity does not exist on the remote server"""
-        if se.id is not None:
+        given indicating that the node does not exist on the remote server"""
+        if node.id is not None:
             raise RuntimeError('cannot update id')
         if node_id is None:
-            self._treat_missing(se)
+            node_id = self._get_new_id()
+        node.id = node_id
+        if node_id in self._id_look_up:
+            self._merge_into(node, self._id_look_up[node.id])
         else:
-            se.id = node_id
-            self._treat_existing(se)
-
-    def set_identifiable_of_node(self, se: SyncNode, identifiable: Identifiable):
-        se.identifiable = identifiable
-        equivalent_se = self.get_equivalent(se)
-        if equivalent_se is not None:
-            self._merge_into(se, equivalent_se)
+            self._id_look_up[node.id] = node
+            if node.id < 0:
+                self._treat_missing(node)
+            else:
+                self._treat_existing(node)
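+            # This node is now checked (missing or existing); nodes whose identity depends
+            # on it may have become identifiable, so try to compute their identifiables too.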
+            for other_node in self.backward_id_references[node.uuid].union(
+                    self.forward_id_referenced_by[node.uuid]):
+                try:
+                    identifiable = self.identifiableAdapter.get_identifiable(
+                        other_node, self.backward_id_referenced_by[other_node.uuid])
+                    self.set_identifiable_of_node(other_node, identifiable)
+                except Exception:
+                    # The identifiable of the dependent node still cannot be determined
+                    # (e.g. it relies on other unchecked references); it is retried later.
+                    pass
+
+    def _get_new_id(self):
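+        """Return a new negative id, which marks a node as missing on the remote server."""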
+        self._remote_missing_counter -= 1
+        return self._remote_missing_counter
+
+    def set_identifiable_of_node(self, node: SyncNode, identifiable: Identifiable):
+        node.identifiable = identifiable
+        equivalent_se = self.get_equivalent(node)
+        if equivalent_se is not None and equivalent_se is not node:
+            self._merge_into(node, equivalent_se)
             assert equivalent_se.identifiable is not None
+        else:
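+            # No equivalent node is known yet; register the identifiable so that nodes with
+            # the same identity that are treated later are merged into this one.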
+            assert node.identifiable.get_representation() not in self._identifiable_look_up
+            self._identifiable_look_up[node.identifiable.get_representation()] = node
 
     def export_record_lists(self):
-        self._update_reference_values()
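+        # Export all SyncNodes to db.Entity objects and replace SyncNode property values by
+        # the corresponding exported entities so that the returned records reference each
+        # other directly.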
+        entities = []
+        node_map = {}
+        for el in self.nodes:
+            entities.append(el.export_entity())
+            node_map[id(el)] = entities[-1]
+        for ent in entities:
+            for p in ent.properties:
+                # Replace each SyncNode element in list values by its exported entity:
+                if isinstance(p.value, list):
+                    for ii, el in enumerate(p.value):
+                        if isinstance(el, SyncNode):
+                            p.value[ii] = node_map[id(el)]
+
+                elif isinstance(p.value, SyncNode):
+                    p.value = node_map[id(p.value)]
 
-        # TODO
-        missing = [el.fragments[0] for el in self._missing.values()]
+        missing = [el for el in entities if el.id < 0]
+        existing = [el for el in entities if el.id > 0]
         # remove negative IDs
         for el in missing:
             if el.id is None:
@@ -183,18 +164,18 @@ class SyncGraph():
                 raise RuntimeError("This should not happen")  # TODO remove
             el.id = None
 
-        return (missing, [el.fragments[0] for el in self._existing.values()])
+        return (missing, existing)
 
-    def identity_relies_on_unchecked_entity(self, se: SyncNode):
+    def identity_relies_on_unchecked_entity(self, node: SyncNode):
         """
         If a record for which it could not yet be verified whether it exists in LA or not is part
         of the identifying properties, this returns True, otherwise False
         """
 
         return any([id(ent) not in self._missing and id(ent) not in self._existing
-                    for ent in self.forward_id_references[se.uuid]]
+                    for ent in self.forward_id_references[node.uuid]]
                    + [id(ent) not in self._missing and id(ent) not in self._existing
-                      for ent in self.backward_id_referenced_by[se.uuid]])
+                      for ent in self.backward_id_referenced_by[node.uuid]])
 
     def unchecked_contains_circular_dependency(self):
         """
@@ -362,31 +343,31 @@ class SyncGraph():
         backward_id_referenced_by: Dict[str, set[SyncNode]] = {}
 
         # initialize with empty lists/dict
-        for se in flat:
-            forward_references[se.uuid] = set()
-            backward_references[se.uuid] = set()
-            forward_id_references[se.uuid] = set()
-            backward_id_references[se.uuid] = set()
-            forward_id_referenced_by[se.uuid] = set()
-            backward_id_referenced_by[se.uuid] = set()
-        for se in flat:
-            for p in se.properties:
+        for node in flat:
+            forward_references[node.uuid] = set()
+            backward_references[node.uuid] = set()
+            forward_id_references[node.uuid] = set()
+            backward_id_references[node.uuid] = set()
+            forward_id_referenced_by[node.uuid] = set()
+            backward_id_referenced_by[node.uuid] = set()
+        for node in flat:
+            for p in node.properties:
                 val = p.value
                 if not isinstance(val, list):
                     val = [val]
                 for v in val:
                     if isinstance(v, SyncNode):
-                        forward_references[se.uuid].add(v)
-                        backward_references[v.uuid].add(se)
-                        if len([el.name for el in se.registered_identifiable.properties if
+                        forward_references[node.uuid].add(v)
+                        backward_references[v.uuid].add(node)
+                        if len([el.name for el in node.registered_identifiable.properties if
                                 el.name == p.name]) > 0:
-                            forward_id_references[se.uuid].add(v)
-                            backward_id_references[v.uuid].add(se)
+                            forward_id_references[node.uuid].add(v)
+                            backward_id_references[v.uuid].add(node)
                         if (v.registered_identifiable is not None and
                                 IdentifiableAdapter.referencing_entity_has_appropriate_type(
-                                se.parents, v.registered_identifiable)):
-                            forward_id_referenced_by[se.uuid].add(v)
-                            backward_id_referenced_by[v.uuid].add(se)
+                                node.parents, v.registered_identifiable)):
+                            forward_id_referenced_by[node.uuid].add(v)
+                            backward_id_referenced_by[v.uuid].add(node)
 
         return (forward_references, backward_references, forward_id_references,
                 backward_id_references, forward_id_referenced_by, backward_id_referenced_by,
@@ -408,6 +390,7 @@ class SyncGraph():
                 if self.get_equivalent(node) is not None:
                     self._merge_into(node, self.get_equivalent(node))
                 else:
+                    self._path_look_up[node.path] = node
                     try:
                         existing = cached_get_entity_by(path=node.path)
                     except EmptyUniqueQueryError:
@@ -427,21 +410,6 @@ class SyncGraph():
 
                 self.unchecked.remove(node)
 
-    def _add_any(self, entity: SyncNode, lookup):
-        """Add ``entity`` to this SemanticTarget and store in ``lookup`` cache.
-
-The entity is stored in the SyncNode's ``id``, ``path`` and ``identifiable`` lookup tables, if
-the respective attributes exist.
-
-"""
-        if entity.id is not None:
-            self._id_look_up[entity.id] = entity
-        if entity.path is not None:
-            self._path_look_up[entity.path] = entity
-        if entity.identifiable is not None:
-            self._identifiable_look_up[entity.identifiable.get_representation()] = entity
-        lookup[id(entity)] = entity
-
     def _merge_into(self, source: SyncNode, target: SyncNode):
         """ FIXME tries to merge record into newrecord
 
@@ -449,35 +417,49 @@ the respective attributes exist.
         In any case, references are bent to the newrecord object.
 
         """
+        assert source is not target
         target.update(source)
+        if id(source) in self._missing and id(target) not in self._missing:
+            self._treat_missing(target)
+        if id(source) in self._existing and id(target) not in self._existing:
+            self._treat_existing(target)
+
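+        # After the merge, the combined node may contain enough information to determine its
+        # identifiable (which in turn may allow merging it with yet another equivalent node).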
+        if (target.identifiable is None and not self.identity_relies_on_unchecked_entity(target)):
+            try:
+                identifiable = self.identifiableAdapter.get_identifiable(
+                    target, self.backward_id_referenced_by[target.uuid])
+                self.set_identifiable_of_node(target, identifiable)
+            except Exception:
+                # The identifiable still cannot be determined (e.g. identifying information
+                # is missing or relies on unchecked references); it is retried later.
+                pass
 
         # update reference mappings
-        for se in self.forward_references.pop(source.uuid):
-            self.forward_references[target.uuid].add(se)
-            self.backward_references[se.uuid].remove(source)
-            self.backward_references[se.uuid].add(target)
-        for se in self.backward_references.pop(source.uuid):
-            self.backward_references[target.uuid].add(se)
-            self.forward_references[se.uuid].remove(source)
-            self.forward_references[se.uuid].add(target)
-
-        for se in self.forward_id_references.pop(source.uuid):
-            self.forward_id_references[target.uuid].add(se)
-            self.backward_id_references[se.uuid].remove(source)
-            self.backward_id_references[se.uuid].add(target)
-        for se in self.backward_id_references.pop(source.uuid):
-            self.backward_id_references[target.uuid].add(se)
-            self.forward_id_references[se.uuid].remove(source)
-            self.forward_id_references[se.uuid].add(target)
-
-        for se in self.forward_id_referenced_by.pop(source.uuid):
-            self.forward_id_referenced_by[target.uuid].add(se)
-            self.backward_id_referenced_by[se.uuid].remove(source)
-            self.backward_id_referenced_by[se.uuid].add(target)
-        for se in self.backward_id_referenced_by.pop(source.uuid):
-            self.backward_id_referenced_by[target.uuid].add(se)
-            self.forward_id_referenced_by[se.uuid].remove(source)
-            self.forward_id_referenced_by[se.uuid].add(target)
+        for node in self.forward_references.pop(source.uuid):
+            self.forward_references[target.uuid].add(node)
+            self.backward_references[node.uuid].remove(source)
+            self.backward_references[node.uuid].add(target)
+        for node in self.backward_references.pop(source.uuid):
+            self.backward_references[target.uuid].add(node)
+            self.forward_references[node.uuid].remove(source)
+            self.forward_references[node.uuid].add(target)
+
+        for node in self.forward_id_references.pop(source.uuid):
+            self.forward_id_references[target.uuid].add(node)
+            self.backward_id_references[node.uuid].remove(source)
+            self.backward_id_references[node.uuid].add(target)
+        for node in self.backward_id_references.pop(source.uuid):
+            self.backward_id_references[target.uuid].add(node)
+            self.forward_id_references[node.uuid].remove(source)
+            self.forward_id_references[node.uuid].add(target)
+
+        for node in self.forward_id_referenced_by.pop(source.uuid):
+            self.forward_id_referenced_by[target.uuid].add(node)
+            self.backward_id_referenced_by[node.uuid].remove(source)
+            self.backward_id_referenced_by[node.uuid].add(target)
+        for node in self.backward_id_referenced_by.pop(source.uuid):
+            self.backward_id_referenced_by[target.uuid].add(node)
+            self.forward_id_referenced_by[node.uuid].remove(source)
+            self.forward_id_referenced_by[node.uuid].add(target)
 
         # remove unneeded SyncNode
         self.nodes.remove(source)
@@ -496,8 +478,8 @@ the respective attributes exist.
                 el,
                 self.identifiableAdapter.get_registered_identifiable(el)))
             se_lookup[id(el)] = self.nodes[-1]
-        for se in self.nodes:
-            for p in se.properties:
+        for node in self.nodes:
+            for p in node.properties:
                 if isinstance(p.value, list):
                     for index, val in enumerate(p.value):
                         if id(val) in se_lookup:
@@ -507,18 +489,18 @@ the respective attributes exist.
                         p.value = se_lookup[id(p.value)]
 
     def _treat_missing(self, node):
-        if node.path is None and node.identifiable is None:
-            raise RuntimeError("no identifying information")
-        node.id = self._remote_missing_counter
-        self._remote_missing_counter -= 1
-        self._add_any(node, self._missing)
+        self._missing[id(node)] = node
         self.unchecked.remove(node)
 
         for other_missing in (self.backward_id_references[node.uuid].union(
                               self.forward_id_referenced_by[node.uuid])):
-            self.set_id_of_node(other_missing)
+            if other_missing in self.unchecked:
+                self.set_id_of_node(other_missing)
 
     def _treat_existing(self, node):
         assert node.id > 0
-        self._add_any(node, self._existing)
+        self._existing[id(node)] = node
         self.unchecked.remove(node)
+
+    def __repr__(self):
+        return (f"{self.__class__.__name__} with {len(self.nodes)} nodes "
+                f"({len(self.unchecked)} unchecked)")
diff --git a/unittests/test_crawler.py b/unittests/test_crawler.py
index fdae024bf5f38a239f9274cdcb3b3801c3345c45..14167968b9816bc3f5c55b6dac8bec669e6ded8f 100644
--- a/unittests/test_crawler.py
+++ b/unittests/test_crawler.py
@@ -48,10 +48,10 @@ from caoscrawler.identifiable_adapters import (CaosDBIdentifiableAdapter,
                                                LocalStorageIdentifiableAdapter)
 from caoscrawler.scanner import (create_converter_registry, scan_directory,
                                  scan_structure_elements)
-from caoscrawler.sync_graph import SyncGraph
 from caoscrawler.stores import GeneralStore, RecordStore
 from caoscrawler.structure_elements import (DictElement, DictListElement,
                                             DictTextElement, File)
+from caoscrawler.sync_graph import SyncGraph
 from linkahead.apiutils import compare_entities
 from linkahead.cached import cache_clear
 from linkahead.exceptions import EmptyUniqueQueryError
@@ -172,6 +172,24 @@ def test_constructor():
         assert "The generalStore argument of the Crawler" in str(w[-1].message)
 
 
+@pytest.fixture
+def crawler_mocked_identifiable_retrieve():
+    crawler = Crawler()
+    # TODO use minimal setup
+    # mock retrieval of registered identifiables:
+    # return a Record with the parent and a 'name' property
+    crawler.identifiableAdapter.get_registered_identifiable = Mock(
+        side_effect=lambda x: db.Record().add_parent(x.parents[0].name).add_property(name='name'))
+
+    # Simulate remote server content by using the names to identify records
+    # There is only a single known Record with name A
+    crawler.identifiableAdapter.retrieve_identified_record_for_record = Mock(side_effect=partial(
+        basic_retrieve_by_name_mock_up, known={"A": db.Record(id=1111, name="A")}))
+    crawler.identifiableAdapter.retrieve_identified_record_for_identifiable = Mock(
+        side_effect=partial(
+            basic_retrieve_by_name_mock_up, known={"A": db.Record(id=1111, name="A")}))
+    return crawler
+
+
 @pytest.mark.filterwarnings("ignore::DeprecationWarning")
 def test_deprecated_functions():
     with warnings.catch_warnings(record=True) as w:
@@ -259,10 +277,10 @@ def test_split_into_inserts_and_updates_trivial():
     crawler.split_into_inserts_and_updates(st)
 
 
-def test_split_into_inserts_and_updates_unidentified():
-    crawler = Crawler()
-    st = SyncGraph([db.Record(name="recname").add_parent("someparent")],
-                        crawler.identifiableAdapter)
+def test_split_into_inserts_and_updates_unidentified(crawler_mocked_identifiable_retrieve):
+    crawler = crawler_mocked_identifiable_retrieve
+    st = SyncGraph([db.Record().add_parent("someparent")],
+                   crawler.identifiableAdapter)
     with raises(ValueError) as err:
         crawler.split_into_inserts_and_updates(st)
     assert str(err.value).startswith("There is no identifying information.")
@@ -276,24 +294,6 @@ def basic_retrieve_by_name_mock_up(rec, referencing_entities=None, known=None):
         return None
 
 
-@pytest.fixture
-def crawler_mocked_identifiable_retrieve():
-    crawler = Crawler()
-    # TODO use minimal setup
-    # mock retrieval of registered identifiabls: return Record with just a parent
-    crawler.identifiableAdapter.get_registered_identifiable = Mock(
-        side_effect=lambda x: db.Record().add_parent(x.parents[0].name).add_property(name='name'))
-
-    # Simulate remote server content by using the names to identify records
-    # There is only a single known Record with name A
-    crawler.identifiableAdapter.retrieve_identified_record_for_record = Mock(side_effect=partial(
-        basic_retrieve_by_name_mock_up, known={"A": db.Record(id=1111, name="A")}))
-    crawler.identifiableAdapter.retrieve_identified_record_for_identifiable = Mock(
-        side_effect=partial(
-            basic_retrieve_by_name_mock_up, known={"A": db.Record(id=1111, name="A")}))
-    return crawler
-
-
 def test_split_into_inserts_and_updates_single(crawler_mocked_identifiable_retrieve):
     crawler = crawler_mocked_identifiable_retrieve
     identlist = [Identifiable(name="A", record_type="C"), Identifiable(name="B", record_type="C")]
@@ -301,10 +301,10 @@ def test_split_into_inserts_and_updates_single(crawler_mocked_identifiable_retri
                db.Record(name="B").add_parent("C")]
 
     st = SyncGraph(entlist, crawler.identifiableAdapter)
-    assert st.get_checked_equivalent(st.se[0]) is None
-    assert st.get_checked_equivalent(st.se[0]) is None
-    assert not st.identity_relies_on_unchecked_entity(st.se[0])
-    assert not st.identity_relies_on_unchecked_entity(st.se[1])
+    assert st.get_equivalent(st.nodes[0]) is None
+    assert st.get_equivalent(st.nodes[1]) is None
+    assert not st.identity_relies_on_unchecked_entity(st.nodes[0])
+    assert not st.identity_relies_on_unchecked_entity(st.nodes[1])
     assert crawler.identifiableAdapter.retrieve_identified_record_for_record(
         identlist[0]).id == 1111
     assert crawler.identifiableAdapter.retrieve_identified_record_for_record(
@@ -483,14 +483,14 @@ a: ([b1, b2])
     crawler = Crawler(identifiableAdapter=ident_adapter)
 
     st = SyncGraph(deepcopy([rec_a, *rec_b, *rec_c]), crawler.identifiableAdapter)
-    assert st.identity_relies_on_unchecked_entity(st.se[0]) is False
-    assert st.identity_relies_on_unchecked_entity(st.se[1])
-    assert st.identity_relies_on_unchecked_entity(st.se[2])
-    assert st.identity_relies_on_unchecked_entity(st.se[3])
-    assert st.identity_relies_on_unchecked_entity(st.se[4])
-    st.se[0].identifiable = Identifiable(path='a')  # dummy identifiable
-    st.set_missing(st.se[0])
-    assert st.identity_relies_on_unchecked_entity(st.se[1]) is False
+    assert st.identity_relies_on_unchecked_entity(st.nodes[0]) is False
+    assert st.identity_relies_on_unchecked_entity(st.nodes[1])
+    assert st.identity_relies_on_unchecked_entity(st.nodes[2])
+    assert st.identity_relies_on_unchecked_entity(st.nodes[3])
+    assert st.identity_relies_on_unchecked_entity(st.nodes[4])
+    st.nodes[0].identifiable = Identifiable(path='a')  # dummy identifiable
+    st.set_missing(st.nodes[0])
+    assert st.identity_relies_on_unchecked_entity(st.nodes[1]) is False
 
     with raises(db.apiutils.EntityMergeConflictError) as rte:
         crawler.synchronize(commit_changes=False,
@@ -709,8 +709,8 @@ def test_split_into_inserts_and_updates_backref(crawler_mocked_for_backref_test)
 
     # identifiables were not yet checked
     st = SyncGraph(entlist, crawler.identifiableAdapter)
-    assert st.get_checked_equivalent(st.se[1]) is None
-    assert st.get_checked_equivalent(st.se[0]) is None
+    assert st.get_equivalent(st.nodes[1]) is None
+    assert st.get_equivalent(st.nodes[0]) is None
     # one can be found remotely, one not
     assert crawler.identifiableAdapter.retrieve_identified_record_for_record(
         identlist[0]).id == 1111
@@ -742,8 +742,8 @@ def test_split_into_inserts_and_updates_mult_backref(crawler_mocked_for_backref_
     st = SyncGraph(entlist, crawler.identifiableAdapter)
 
     identifiable = crawler.identifiableAdapter.get_identifiable(
-        st.se[0],
-        st.backward_id_referenced_by[st.se[0].uuid])
+        st.nodes[0],
+        st.backward_id_referenced_by[st.nodes[0].uuid])
     assert len(identifiable.backrefs) == 2
 
     # check the split...
@@ -766,8 +766,8 @@ def test_split_into_inserts_and_updates_diff_backref(crawler_mocked_for_backref_
     # test whether both entities are listed in the backref attribute of the identifiable
     st = SyncGraph(entlist, crawler.identifiableAdapter)
     identifiable = crawler.identifiableAdapter.get_identifiable(
-        st.se[0],
-        st.backward_id_referenced_by[st.se[0].uuid])
+        st.nodes[0],
+        st.backward_id_referenced_by[st.nodes[0].uuid])
 
     assert len(identifiable.backrefs) == 2
 
@@ -1057,7 +1057,7 @@ def test_treated_record_lookup():
     # If a Record was added using the ID, the ID must be used to identify it even though later an
     # identifiable may be passed as well
     exist.identifiable = Identifiable(name='b')
-    assert trlu.get_checked_equivalent(exist) is exist
+    assert trlu.get_equivalent(exist) is exist
 
 
 def test_merge_entity_with_identifying_reference(crawler_mocked_identifiable_retrieve):
diff --git a/unittests/test_sync_graph.py b/unittests/test_sync_graph.py
index e5321cdafeeb30b8c52cc7ad3f0d375a92265003..01efa4f4a25febfddfaf387cf2a6f344f03cefc5 100644
--- a/unittests/test_sync_graph.py
+++ b/unittests/test_sync_graph.py
@@ -30,6 +30,26 @@ from caoscrawler.sync_graph import SyncGraph, SyncNode
 from test_crawler import basic_retrieve_by_name_mock_up, mock_get_entity_by
 
 
+@pytest.fixture
+def simple_adapter():
+    # We use the reference as identifying reference in both directions. Thus the map is the same
+    # for all three categories: references, id_references and id_referenced_by
+    ident_adapter = CaosDBIdentifiableAdapter()
+    ident_adapter.register_identifiable(
+        "RT1",
+        db.RecordType().add_parent("RT1").add_property("RT2"))
+    ident_adapter.register_identifiable(
+        "RT2",
+        db.RecordType().add_parent("RT2").add_property("is_referenced_by", ["RT1", "RT3"]))
+    ident_adapter.register_identifiable(
+        "RT3",
+        db.RecordType().add_parent("RT3").add_property("a"))
+    ident_adapter.register_identifiable(
+        "RT4",
+        db.RecordType().add_parent("RT4").add_property("RT3"))
+    return ident_adapter
+
+
 def test_create_flat_list():
     a = db.Record()
     b = db.Record()
@@ -51,17 +71,15 @@ def test_create_flat_list():
 
 
 def test_create_reference_mapping():
-    a = db.Record().add_parent("A")
-    b = db.Record(id=132).add_parent("B").add_property('a', a)
-    ses = [SyncNode(a, db.RecordType().add_property("is_referenced_by", ["B"])),
-           SyncNode(b, db.RecordType().add_property("a"))]
+    a = SyncNode(db.Record().add_parent("RT1"),
+                 db.RecordType().add_property("is_referenced_by", ["RT2"]))
+    b = SyncNode(db.Record(id=132).add_parent("RT2").add_property('a', a),
+                 db.RecordType().add_property("a"))
+    ses = [a, b]
 
     (forward_references, backward_references, forward_id_references,
      backward_id_references, forward_id_referenced_by,
-     backward_id_referenced_by) = SyncGraph._create_reference_mapping(
-        ses,
-        {id(a): ses[0], id(b): ses[1]}
-    )
+     backward_id_referenced_by) = SyncGraph._create_reference_mapping(ses)
     # test initialization
     assert ses[0].uuid in forward_references
     assert ses[1].uuid in forward_references
@@ -104,7 +122,7 @@ def test_SyncGraph():
     st = SyncGraph([a], ident_adapter)
 
 
-def test_merge_into():
+def test_merge_into_trivial(simple_adapter):
     # simplest case: a -> c
     #                b
     #                (a reference c; b does not reference anything; a & b have the same target record)
@@ -112,17 +130,12 @@ def test_merge_into():
     a = db.Record().add_parent("RT1").add_property('RT2', c)
     b = db.Record().add_parent("RT1")
 
-    # We use the reference as identifying reference in both directions. Thus the map is the same
-    # for all three categories: references, id_references and id_referenced_by
-    ident_a = db.RecordType().add_parent("RT1").add_property("RT2")
-    ident_b = db.RecordType().add_parent("RT2").add_property("is_referenced_by", ["RT1"])
-    ident_adapter = CaosDBIdentifiableAdapter()
-    ident_adapter.register_identifiable("RT1", ident_a)
-    ident_adapter.register_identifiable("RT2", ident_b)
-
-    st = SyncGraph([a, b], ident_adapter)
+    st = SyncGraph([a, b], simple_adapter)
+    se_a = st.nodes[0]
+    se_b = st.nodes[1]
+    se_c = st.nodes[2]
 
-    # CHECK REFERENCE MAP:
+    # CHECK REFERENCE MAP (before merge):
     # c is referenced by a
     assert len(st.forward_references[se_a.uuid]) == 1
     se_c in st.forward_references[se_a.uuid]
@@ -182,15 +195,17 @@ def test_merge_into():
     assert len(st.backward_id_referenced_by[se_c.uuid]) == 1
     se_b in st.backward_id_referenced_by[se_c.uuid]
 
+
+def test_merge_into_simple(simple_adapter):
     # simple case: a -> c <- b (a & b reference c; a & b have the same target record)
     c = db.Record(name='c').add_parent("RT2")
     a = db.Record().add_parent("RT1").add_property('RT2', c)
     b = db.Record().add_parent("RT1").add_property('RT2', c)
 
-    st = SyncGraph([a, b], ident_adapter)
-    se_a = st.se_lookup[id(a)]
-    se_b = st.se_lookup[id(b)]
-    se_c = st.se_lookup[id(c)]
+    st = SyncGraph([a, b], simple_adapter)
+    se_a = st.nodes[0]
+    se_b = st.nodes[1]
+    se_c = st.nodes[2]
 
     # CHECK REFERENCE MAP:
     # c is referenced by a & b
@@ -276,6 +291,107 @@ def test_backward_id_referenced_by():
     assert st.nodes[1] in st.backward_id_referenced_by[st.nodes[0].uuid]
 
 
+def test_set_id_of_node(simple_adapter):
+    # setting the id should lead to the node being marked as existing
+    entlist = [db.Record().add_parent("RT1")]
+    st = SyncGraph(entlist, simple_adapter)
+    assert len(st.nodes) == 1
+    assert len(st.unchecked) == 1
+    st.set_id_of_node(st.unchecked[0], 101)
+    assert len(st.nodes) == 1
+    assert len(st.unchecked) == 0
+    assert id(st.nodes[0]) in st._existing
+
+    # setting the id with None should lead to the node being marked as missing
+    entlist = [db.Record().add_parent("RT1").add_property(name="RT2", value=1)]
+    st = SyncGraph(entlist, simple_adapter)
+    assert len(st.nodes) == 1
+    assert len(st.unchecked) == 1
+    # the identifiable is automatically set during initialization of the graph
+    assert st.nodes[0].identifiable is not None
+    st.set_id_of_node(st.unchecked[0])
+    assert len(st.nodes) == 1
+    assert len(st.unchecked) == 0
+    assert id(st.nodes[0]) in st._missing
+
+    # setting the id to one that already exists should lead to a merge
+    entlist = [
+        db.Record(id=101).add_parent("RT1"),
+        db.Record().add_parent("RT1").add_property(name="a", value=1)]
+    st = SyncGraph(entlist, simple_adapter)
+    assert len(st.nodes) == 2
+    assert len(st.unchecked) == 1
+    st.set_id_of_node(st.unchecked[0], 101)
+    assert len(st.nodes) == 1
+    assert len(st.unchecked) == 0
+    assert st.nodes[0].properties[0].name == "a"
+
+    # setting the id to None should lead to depending nodes marked as missing
+    entlist = [
+        db.Record().add_parent("RT3").add_property(name="a", value=1).add_property(
+            name="RT2", value=db.Record().add_parent("RT2")),
+    ]
+    st = SyncGraph(entlist, simple_adapter)
+    assert len(st.nodes) == 2
+    assert len(st.unchecked) == 2
+    st.set_id_of_node(st.unchecked[0])
+    assert len(st.nodes) == 2
+    assert len(st.unchecked) == 0
+    assert id(st.nodes[0]) in st._missing
+    assert id(st.nodes[1]) in st._missing
+
+    # same as above but with backref
+    entlist = [
+        db.Record()
+        .add_parent("RT4")
+        .add_property(name="RT3",
+                      value=db.Record().add_parent("RT3").add_property(name="a", value=1)),
+    ]
+    st = SyncGraph(entlist, simple_adapter)
+    assert len(st.nodes) == 2
+    assert len(st.unchecked) == 2
+    assert st.unchecked[1].identifiable is not None
+    st.set_id_of_node(st.unchecked[1])
+    assert len(st.nodes) == 2
+    assert len(st.unchecked) == 0
+    assert id(st.nodes[0]) in st._missing
+    assert id(st.nodes[1]) in st._missing
+
+    # setting an id might allow to check another node that depends on the former
+    entlist = [
+        db.Record()
+        .add_parent("RT4")
+        .add_property(name="RT3",
+                      value=db.Record().add_parent("RT3").add_property(name="a", value=1)),
+    ]
+    st = SyncGraph(entlist, simple_adapter)
+    assert st.nodes[0].identifiable is None
+    assert st.nodes[1].identifiable is not None
+    st.set_id_of_node(st.unchecked[1], 111)
+    assert st.nodes[0].identifiable is not None
+    assert st.nodes[1].identifiable is not None
+
+    # same as above but going one step further: the new identifiable allows to merge that node
+    entlist = [
+        (db.Record()
+         .add_parent("RT4")
+         .add_property(name="RT3",
+                       value=db.Record().add_parent("RT3").add_property(name="a", value=1))),
+
+        (db.Record()
+         .add_parent("RT4")
+         .add_property(name="RT3", value=111))
+    ]
+    st = SyncGraph(entlist, simple_adapter)
+    assert st.nodes[0].identifiable is None
+    assert st.nodes[1].identifiable is not None
+    assert st.nodes[2].identifiable is not None
+    assert len(st.nodes) == 3
+    st.set_id_of_node(st.unchecked[2], 111)
+    assert st.nodes[0].identifiable is not None
+    assert len(st.nodes) == 2
+
+
 @patch("caoscrawler.sync_graph.cached_get_entity_by",
        new=Mock(side_effect=mock_get_entity_by))
 def test_merging():
@@ -313,15 +429,8 @@ def test_merging():
         db.File(name='101').add_parent("A").add_property('a', value=1),
         db.File(name='101').add_parent("A").add_property('a', value=1)]
     st = SyncGraph(entlist, ident_adapter)
-    assert len(st.unchecked) == 2
-    st.set_identifiable_of_node(st.nodes[0],
-                                Identifiable(record_type="A", name='101', properties={'a': 1}))
-    assert len(st.unchecked) == 2
-    st.set_identifiable_of_node(st.nodes[1],
-                                Identifiable(record_type="A", name='101', properties={'a': 1}))
-    assert len(st.unchecked) == 1
     assert len(st.nodes) == 1
-    assert st.nodes[1].id is None
+    assert st.nodes[0].id is None
     assert '101' == st.nodes[0].name
     assert "A" == st.nodes[0].parents[0].name
     assert 1 == st.nodes[0].properties[0].value
@@ -336,14 +445,13 @@ def test_merging():
         db.Record(name='a').add_parent("A").add_property('a', value=1)]
 
     st = SyncGraph(entlist, ident_adapter)
-    assert len(st.nodes) == 2
-    assert len(st.unchecked) == 1
-    st.make_identifiable(st.nodes[1])
-    assert st.merge_with_equivalent(st.nodes[1])
     assert len(st.nodes) == 1
     assert len(st.unchecked) == 0
-    for ii in range(4):
-        assert entlist[ii] in st.nodes[0].fragments
+    assert 'a' == st.nodes[0].name
+    assert "A" == st.nodes[0].parents[0].name
+    assert 1 == st.nodes[0].properties[0].value
+    assert "a" == st.nodes[0].properties[0].name
+    assert 101 == st.nodes[0].id
 
 
 def test_sync_node():