diff --git a/integrationtests/test_issues.py b/integrationtests/test_issues.py
index bf2ef2fdc0060f59dbced05053ab6ee195f05735..1de6e8077e86bea3b845ea874127fc7d5374365e 100644
--- a/integrationtests/test_issues.py
+++ b/integrationtests/test_issues.py
@@ -269,6 +269,8 @@ Campaign:
 
     crawler = Crawler(identifiableAdapter=ident)
 
+    # Add records: ds_parent references ds_child references event references
+    # basis and campaign, campaign references basis.
     basis = db.Record(name="Poseidon").add_parent(name="Basis")
     campaign = db.Record(name="POS386").add_parent(name="Campaign").add_property(name="Basis", value=basis)
     event = db.Record(name="GeoB13952").add_parent(name="Event").add_property(name="label", value="GeoB13952")
@@ -292,7 +294,8 @@ Campaign:
     basis = db.Record(name="Poseidon").add_parent(name="Basis")
     campaign = db.Record(name="POS386").add_parent(name="Campaign").add_property(name="Basis", value=basis)
     # multiples are necessary
-    recs = [ds_parent, ds_child, event, event, ds_child, event, basis, campaign]
+    # recs = [ds_parent, ds_child, event, event, ds_child, event, basis, campaign]
+    recs = [event, basis, campaign]
     
     ins, ups = crawler.synchronize(crawled_data=recs, unique_names=False)
     # There is only two datasets and one event to be inserted