From c7892b55ed8edb94509dc406fbce1c7f4c1c20b6 Mon Sep 17 00:00:00 2001
From: Florian Spreckelsen <f.spreckelsen@inidscale.com>
Date: Tue, 23 Jan 2024 15:31:13 +0100
Subject: [PATCH] WIP: Further reduce non-working example

---
 integrationtests/test_issues.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/integrationtests/test_issues.py b/integrationtests/test_issues.py
index bf2ef2fd..1de6e807 100644
--- a/integrationtests/test_issues.py
+++ b/integrationtests/test_issues.py
@@ -269,6 +269,8 @@ Campaign:
 
     crawler = Crawler(identifiableAdapter=ident)
 
+    # Add records: ds_parent references ds_child, ds_child references event,
+    # event references basis and campaign, and campaign references basis.
     basis = db.Record(name="Poseidon").add_parent(name="Basis")
     campaign = db.Record(name="POS386").add_parent(name="Campaign").add_property(name="Basis", value=basis)
     event = db.Record(name="GeoB13952").add_parent(name="Event").add_property(name="label", value="GeoB13952")
@@ -292,7 +294,8 @@ Campaign:
     basis = db.Record(name="Poseidon").add_parent(name="Basis")
     campaign = db.Record(name="POS386").add_parent(name="Campaign").add_property(name="Basis", value=basis)
     # multiple occurrences of the same records are necessary to trigger the issue
-    recs = [ds_parent, ds_child, event, event, ds_child, event, basis, campaign]
+    # recs = [ds_parent, ds_child, event, event, ds_child, event, basis, campaign]
+    recs = [event, basis, campaign]
     
     ins, ups = crawler.synchronize(crawled_data=recs, unique_names=False)
     # There are only two datasets and one event to be inserted
-- 
GitLab
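
For reference, a minimal standalone sketch of the record graph that the in-diff comment
describes (ds_parent -> ds_child -> event -> basis/campaign, and campaign -> basis). The
import, the "Dataset" parent, the record names of the dataset records, and the property
names linking them are assumptions; the actual ds_parent and ds_child definitions live in
parts of test_issues.py not shown in these hunks.

    import caosdb as db  # assumed import; the test file uses `db` as the client alias

    # Leaf records, as constructed in the hunk above
    basis = db.Record(name="Poseidon").add_parent(name="Basis")
    campaign = db.Record(name="POS386").add_parent(name="Campaign").add_property(name="Basis", value=basis)
    event = db.Record(name="GeoB13952").add_parent(name="Event").add_property(name="label", value="GeoB13952")

    # event references basis and campaign (property names are an assumption)
    event.add_property(name="Basis", value=basis)
    event.add_property(name="Campaign", value=campaign)

    # Hypothetical dataset records completing the chain described in the comment
    ds_child = db.Record(name="child_ds").add_parent(name="Dataset").add_property(name="Event", value=event)
    ds_parent = db.Record(name="parent_ds").add_parent(name="Dataset").add_property(name="Dataset", value=ds_child)

    # Reduced list from this patch: only event, basis, and campaign are synchronized
    recs = [event, basis, campaign]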