diff --git a/unittests/test_crawler.py b/unittests/test_crawler.py
index 920728c418479cd594e894e990d5f15693be589d..fdb497f2f4457aa8ddc670e5e593270db5eca074 100644
--- a/unittests/test_crawler.py
+++ b/unittests/test_crawler.py
@@ -38,6 +38,7 @@ import linkahead as db
 import linkahead.common.models as dbmodels
 import pytest
 import yaml
+from caosadvancedtools.models.parser import parse_model_from_string
 from caoscrawler.crawl import (Crawler, SecurityMode, TreatedRecordLookUp,
                                _treat_deprecated_prefix, crawler_main,
                                split_restricted_path)
@@ -52,7 +53,6 @@ from caoscrawler.stores import GeneralStore, RecordStore
 from caoscrawler.structure_elements import (DictElement, DictListElement,
                                             DictTextElement, File)
 from linkahead.apiutils import compare_entities
-from caosadvancedtools.models.parser import parse_model_from_string
 from linkahead.cached import cache_clear
 from linkahead.exceptions import EmptyUniqueQueryError
 from pytest import raises
@@ -1144,3 +1144,20 @@ def test_treated_record_lookup():
     fi2 = db.File(path='b')
     trlu.add(fi2)
     assert trlu.get_any(db.File(path='b'), Identifiable(name='c')) is fi2
+
+
+def test_merge_entity_with_identifying_reference(crawler_mocked_identifiable_retrieve):
+    # When one Python object representing a record is merged into another Python object
+    # representing the same record, the former object can be discarded; references from it
+    # to other records must then play no role in identifying the merged record.
+    crawler = crawler_mocked_identifiable_retrieve
+    crawler.identifiableAdapter.get_registered_identifiable = Mock(
+        side_effect=lambda x: db.Record().add_parent('C').add_property(name='name') if
+        x.parents[0].name == "C" else
+        db.Record().add_parent('D').add_property(name='is_referenced_by', value="*")
+    )
+    a = db.Record(name='a').add_parent("D")
+    b = db.Record(name='b').add_parent("C")
+    c = db.Record(name='b').add_parent("C").add_property(name="C", value=a)
+    flat = [a, c, b]
+    _, _ = crawler.split_into_inserts_and_updates(flat)