diff --git a/integrationtests/test_use_case_simple_presentation.py b/integrationtests/test_use_case_simple_presentation.py
index e757f5b1db0b146a7f252dadf8ad5288b79c6c9f..443a41e764c6dd1013c47abc017d077e6be39c20 100644
--- a/integrationtests/test_use_case_simple_presentation.py
+++ b/integrationtests/test_use_case_simple_presentation.py
@@ -39,13 +39,25 @@ from caoscrawler.crawl import crawler_main
 # from caosadvancedtools.testutils import clear_database, set_test_key
 # set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
 
+DATADIR = os.path.join(os.path.dirname(__file__), "test_data",
+                       "extroot", "use_case_simple_presentation")
+
+
+@pytest.fixture
+def clear_database():
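+    """Delete all entities with ID > 99 for a clean test database."""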
+    # TODO(fspreck): Remove once the corresponding advancedtools function can be
+    # used.
+    ents = db.execute_query("FIND ENTITY WITH ID>99")
+    if ents:
+        ents.delete()
+
 
 def test_complete_crawler(
-        # clear_database
+        clear_database
 ):
     # Setup the data model:
-    model = parser.parse_model_from_yaml(
-        "use_case_simple_presentation/model.yml")
+    model = parser.parse_model_from_yaml(os.path.join(DATADIR, "model.yml"))
     model.sync_data_model(noquestion=True, verbose=False)
 
     # Insert the data:
@@ -60,11 +72,12 @@ def test_complete_crawler(
             dryrun=False,
             forceAllowSymlinks=False)
 
-    crawler_main("use_case_simple_presentation/extroot/",
-                 "use_case_simple_presentation/cfood.yml",
-                 "use_case_simple_presentation/identifiables.yml",
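+    # Crawl the test data using the cfood and identifiables definitions: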
+    crawler_main(DATADIR,
+                 os.path.join(DATADIR, "cfood.yml"),
+                 os.path.join(DATADIR, "identifiables.yml"),
                  True,
-                 "use_case_simple_presentation/provenance.yml",
+                 os.path.join(DATADIR, "provenance.yml"),
                  False,
                  True,
                  "/extroot")