diff --git a/integrationtests/test_data/extroot/realworld_example/dataset_cfoods.yml b/integrationtests/test_data/extroot/realworld_example/dataset_cfoods.yml
index 1589cba2b44afc3e2645b0ee72f91bf83b327032..eaf2690ae130cb61c8a74452e3e4e1d4fd06846a 100644
--- a/integrationtests/test_data/extroot/realworld_example/dataset_cfoods.yml
+++ b/integrationtests/test_data/extroot/realworld_example/dataset_cfoods.yml
@@ -318,6 +318,14 @@ Data:
                         Dataset:
                           Project: $Project
                       subtree:
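+                        # Match the "name" key of the dict and use its value as the Project record's name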
+                        name_element:
+                          type: DictTextElement
+                          match_name: "name"
+                          match_value: "(?P<name>.*)"
+                          records:
+                            Project:
+                              name: $name
                         full_name_element:
                           type: DictTextElement
                           match_name: "full_name"
diff --git a/integrationtests/test_realworld_example.py b/integrationtests/test_realworld_example.py
index d72edf63871c1a602192a615403338096fdefaec..ac42db87a4c46580ae20d72781263888e13260b8 100644
--- a/integrationtests/test_realworld_example.py
+++ b/integrationtests/test_realworld_example.py
@@ -129,14 +129,77 @@ def test_dataset(
     reason="Reference properties are not updated correctly. "
     "See https://gitlab.indiscale.com/caosdb/src/caosdb-crawler/-/issues/12."
 )
-def test_event_update(usemodel):
+def test_event_update(clear_database, usemodel):
 
-    ident = CaosDBIdentifiableAdapter()
-    ident.load_from_yaml_definition(os.path.join(DATADIR, "identifiables.yml"))
+    identifiable_path = os.path.join(DATADIR, "identifiables.yml")
     crawler_definition_path = os.path.join(DATADIR, "dataset_cfoods.yml")
 
-    # TODO(fspreck): Use crawler_main
-    crawler_main()
+    # Run the crawler on the example data via crawler_main.
+    crawler_main(
+        os.path.join(DATADIR, 'data'),
+        crawler_definition_path,
+        identifiable_path,
+        True,
+        os.path.join(DATADIR, "provenance.yml"),
+        False,
+        True,
+        ""
+    )
+
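+    # Retrieve the dataset record and its event from the first crawl for later comparison.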
+    old_dataset_rec = db.execute_query(
+        "FIND RECORD Dataset WHICH HAS AN EVENT WITH location='Bremen, Germany'")
+    assert len(old_dataset_rec) == 1
+    old_dataset_rec = old_dataset_rec[0]
+    assert old_dataset_rec.get_property("Event").datatype == db.LIST("Event")
+    assert len(old_dataset_rec.get_property("Event").value) == 1
+    old_event_rec = db.Record(
+        id=old_dataset_rec.get_property("Event").value[0]).retrieve()
 
-    # TODO(fspreck): crawl again manually, edit the event records in the update
-    # list, synchronize, and test whether the events have been updated.
+    # Crawl again manually, edit the event records in the update list,
+    # synchronize, and test whether the events have been updated.
+    ident = CaosDBIdentifiableAdapter()
+    ident.load_from_yaml_definition(identifiable_path)
+
+    second_crawler = Crawler(identifiableAdapter=ident)
+    crawler_definition = second_crawler.load_definition(
+        crawler_definition_path)
+    converter_registry = second_crawler.load_converters(crawler_definition)
+    records = second_crawler.start_crawling(
+        Directory("data", os.path.join(DATADIR, "data")),
+        crawler_definition,
+        converter_registry
+    )
+
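+    # Overwrite the event coordinates and location on the freshly crawled records.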
+    for rec in records:
+        if rec.parents[0].name == "Event":
+            rec.get_property("longitude").value = 0.0
+            rec.get_property("latitude").value = 0.0
+            rec.get_property("location").value = "Origin"
+        elif rec.parents[0].name == "Dataset":
+            rec.get_property("Event").value[0].get_property(
+                "longitude").value = 0.0
+            rec.get_property("Event").value[0].get_property(
+                "latitude").value = 0.0
+            rec.get_property("Event").value[0].get_property(
+                "location").value = "Origin"
+    second_crawler.synchronize()
+
+    # Dataset is still the same Record, but with an updated event
+    new_dataset_rec = db.Record(id=old_dataset_rec.id).retrieve()
+    assert new_dataset_rec.get_property(
+        "title").value == old_dataset_rec.get_property("title").value
+    assert new_dataset_rec.get_property("Event").datatype == db.LIST("Event")
+    assert new_dataset_rec.get_property("Event").value is not None
+    assert len(new_dataset_rec.get_property("Event").value) == 1
+    assert new_dataset_rec.get_property("Event").value[0] != old_event_rec.id
+
+    # The event has new properties
+    new_event_rec = db.Record(
+        id=new_dataset_rec.get_property("Event").value[0]).retrieve()
+    assert new_event_rec.get_property("longitude").value == 0.0
+    assert new_event_rec.get_property("latitude").value == 0.0
+    assert new_event_rec.get_property("location").value == "Origin"
+    assert new_event_rec.get_property(
+        "start_datetime").value == old_event_rec.get_property("start_datetime").value