diff --git a/src/caoscrawler/crawl.py b/src/caoscrawler/crawl.py
index fc9832552d3fcac8459084ee2765b8731c169f1a..2ce5eae9afbd78cbf4b78db0b152fa7578258ee9 100644
--- a/src/caoscrawler/crawl.py
+++ b/src/caoscrawler/crawl.py
@@ -295,11 +295,17 @@ class Crawler(object):
         self.crawled_data = data
         return data
 
-    def split_into_inserts_and_updates(self, st: SyncGraph):
+    def _split_into_inserts_and_updates(self, st: SyncGraph):
         """Classify nodes in the SyncGraph ``st`` with respect to their state on the server.
 
 This method iteratively checks whether those nodes exist on the remote server and creates two lists,
 one with the entities that need to be updated and the other with entities to be inserted.
+
+.. todo::
+
+        Should this be made into a public method of SyncGraph instead?  At the moment, this is a
+        purely static method that only operates on the state of ``st``.
+
         """
         entity_was_treated = True
         # st.unchecked contains Entities which could not yet be checked against the remote server
@@ -338,7 +344,7 @@ one with the entities that need to be updated and the other with entities to be
             #                 )
 
             raise RuntimeError(
-                "Could not finish split_into_inserts_and_updates. "
+                "Could not finish _split_into_inserts_and_updates. "
                 "It might be due to a circular dependency")
 
         return st.export_record_lists()
@@ -650,7 +656,7 @@ one with the entities that need to be updated and the other with entities to be
                 "use for example the Scanner to create this data."))
             crawled_data = self.crawled_data
 
-        to_be_inserted, to_be_updated = self.split_into_inserts_and_updates(
+        to_be_inserted, to_be_updated = self._split_into_inserts_and_updates(
             SyncGraph(crawled_data, self.identifiableAdapter))
 
         for el in to_be_updated:
diff --git a/unittests/test_crawler.py b/unittests/test_crawler.py
index 4e8b057e382e6353698b8b63bbcc4e648284d711..d1f713d05c908b77378092f2da5cc28836006f49 100644
--- a/unittests/test_crawler.py
+++ b/unittests/test_crawler.py
@@ -329,7 +329,7 @@ def test_remove_unnecessary_updates():
 def test_split_into_inserts_and_updates_trivial():
     crawler = Crawler()
     st = SyncGraph([], crawler.identifiableAdapter)
-    crawler.split_into_inserts_and_updates(st)
+    crawler._split_into_inserts_and_updates(st)
 
 
 def test_split_into_inserts_and_updates_simple(crawler_mocked_identifiable_retrieve):
@@ -347,7 +347,7 @@ def test_split_into_inserts_and_updates_simple(crawler_mocked_identifiable_retri
     assert crawler.identifiableAdapter.retrieve_identified_record_for_record(
         identlist[1]) is None
 
-    insert, update = crawler.split_into_inserts_and_updates(st)
+    insert, update = crawler._split_into_inserts_and_updates(st)
     assert len(insert) == 1
     assert insert[0].name == "B"
     assert len(update) == 1
@@ -370,7 +370,7 @@ def test_split_into_inserts_and_updates_with_circ(crawler_mocked_identifiable_re
 
     st = SyncGraph([a, b], crawler.identifiableAdapter)
     with pytest.raises(RuntimeError):
-        crawler.split_into_inserts_and_updates(st)
+        crawler._split_into_inserts_and_updates(st)
 
 
 def test_split_into_inserts_and_updates_with_complex(crawler_mocked_identifiable_retrieve):
@@ -389,7 +389,7 @@ def test_split_into_inserts_and_updates_with_complex(crawler_mocked_identifiable
     b.add_property("C", f)
     entlist = [a, b, g]
     st = SyncGraph(entlist, crawler.identifiableAdapter)
-    insert, update = crawler.split_into_inserts_and_updates(st)
+    insert, update = crawler._split_into_inserts_and_updates(st)
     assert len(insert) == 3
     assert "B" in [el.name for el in insert]
     assert len(update) == 1
@@ -487,7 +487,7 @@ a: ([b1, b2])
     # The Cs cannot be merged due to different identifying properties
     # The Bs cannot be merged due to different references to Cs
     with raises(ImpossibleMergeError) as rte:
-        crawler.split_into_inserts_and_updates(st)
+        crawler._split_into_inserts_and_updates(st)
     # TODO
     # assert not isinstance(rte.value, NotImplementedError), \
         # "Exception must not be NotImplementedError, but plain RuntimeError."
@@ -521,7 +521,7 @@ def test_split_into_inserts_and_updates_backref(crawler_mocked_for_backref_test)
         identlist[1]) is None
 
     # check the split...
-    insert, update = crawler.split_into_inserts_and_updates(st)
+    insert, update = crawler._split_into_inserts_and_updates(st)
     # A was found remotely and is therefore in the update list
     assert len(update) == 1
     assert update[0].name == "A"
@@ -550,7 +550,7 @@ def test_split_into_inserts_and_updates_mult_backref(crawler_mocked_for_backref_
     assert len(identifiable.backrefs) == 2
 
     # check the split...
-    insert, update = crawler.split_into_inserts_and_updates(st)
+    insert, update = crawler._split_into_inserts_and_updates(st)
     assert len(update) == 2
     assert len(insert) == 1
 
@@ -575,7 +575,7 @@ def test_split_into_inserts_and_updates_diff_backref(crawler_mocked_for_backref_
     assert len(identifiable.backrefs) == 2
 
     # check the split...
-    insert, update = crawler.split_into_inserts_and_updates(st)
+    insert, update = crawler._split_into_inserts_and_updates(st)
     assert len(update) == 2
     assert len(insert) == 1
 
diff --git a/unittests/test_sync_graph.py b/unittests/test_sync_graph.py
index 2c63cb54aceeaef98df36630ba0873cd62ebf7e3..a7c1539118a4cd87d8c46bf6e18b07b90a90361a 100644
--- a/unittests/test_sync_graph.py
+++ b/unittests/test_sync_graph.py
@@ -18,6 +18,8 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
 #
+
+import logging
 from functools import partial
 from unittest.mock import MagicMock, Mock, patch
 
@@ -631,8 +633,8 @@ def test_detect_circular_dependency(crawler_mocked_identifiable_retrieve, caplog
 
     assert Crawler.detect_circular_dependency([d]) is None
     st = SyncGraph(flat, crawler.identifiableAdapter)
-    with raises(RuntimeError):
-        _, _ = crawler.split_into_inserts_and_updates(st)
+    with pytest.raises(RuntimeError):
+        _, _ = crawler._split_into_inserts_and_updates(st)
     caplog.set_level(logging.ERROR, logger="caoscrawler.converters")
     assert "Found circular dependency" in caplog.text
     assert "\n--------\n\n> Parent: C\n\n>> Name: a\n[\'C\']" in caplog.text
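For reference, a minimal sketch of how the renamed helper would be exercised after this change, following the pattern in `test_split_into_inserts_and_updates_simple` above. The record names, their properties, and the import paths (`caoscrawler.sync_graph`, `linkahead`) are assumptions for illustration; in practice the `identifiableAdapter` has to be configured (or mocked, as in the `crawler_mocked_identifiable_retrieve` fixture) before the classification can succeed.

```python
# Sketch only: mirrors the unit-test usage above; record names and adapter
# setup are illustrative, not taken from the real fixtures.
import linkahead as db

from caoscrawler.crawl import Crawler
from caoscrawler.sync_graph import SyncGraph

crawler = Crawler()

# Two hypothetical records; "A" references "B".
b = db.Record(name="B").add_parent("B")
a = db.Record(name="A").add_parent("A").add_property("B", b)

# Build the synchronisation graph and classify its nodes against the server.
# Note the new leading underscore: the method is now an internal helper.
st = SyncGraph([a, b], crawler.identifiableAdapter)
to_be_inserted, to_be_updated = crawler._split_into_inserts_and_updates(st)
```

External callers that previously invoked `split_into_inserts_and_updates` directly would now go through the public entry point that wraps it (see the `crawled_data` hunk above) rather than calling the underscored helper.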