From 5414650baf129949c4faae3dd67b8c4c63d9606a Mon Sep 17 00:00:00 2001
From: Florian Spreckelsen <f.spreckelsen@indiscale.com>
Date: Mon, 17 Jun 2024 15:01:30 +0200
Subject: [PATCH] MAINT: Rename `h5_crawler` back to `h5-crawler`

---
 .docker/Dockerfile     |  2 +-
 .gitlab-ci.yml         |  4 ++--
 setup.cfg              |  2 +-
 src/doc/converters.rst | 14 +++++++-------
 4 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/.docker/Dockerfile b/.docker/Dockerfile
index 166f8703..2a5dc8f5 100644
--- a/.docker/Dockerfile
+++ b/.docker/Dockerfile
@@ -34,7 +34,7 @@ RUN rm -r /git/.git
 # Install pycaosdb.ini for the tests
 RUN mv /git/.docker/tester_pycaosdb.ini /git/integrationtests/pycaosdb.ini
 
-RUN cd /git/ && pip3 install .[h5_crawler,spss]
+RUN cd /git/ && pip3 install .[h5-crawler,spss]
 
 WORKDIR /git/integrationtests
 # wait for server,
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index c40afd28..3b6dca8f 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -131,7 +131,7 @@ unittest_py3.8:
     # TODO: Use f-branch logic here
     - pip install git+https://gitlab.indiscale.com/caosdb/src/caosdb-pylib.git@dev
     - pip install git+https://gitlab.indiscale.com/caosdb/src/caosdb-advanced-user-tools.git@dev
-    - pip install .[h5_crawler,spss]
+    - pip install .[h5-crawler,spss]
     # actual test
     - caosdb-crawler --help
     - pytest --cov=caosdb -vv ./unittests
@@ -168,7 +168,7 @@ unittest_py3.13:
     # TODO: Use f-branch logic here
     - pip install git+https://gitlab.indiscale.com/caosdb/src/caosdb-pylib.git@dev
     - (! pip install git+https://gitlab.indiscale.com/caosdb/src/caosdb-advanced-user-tools.git@dev)
-    - (! pip install .[h5_crawler,spss])
+    - (! pip install .[h5-crawler,spss])
     # actual test
     - (! caosdb-crawler --help)
     - (! pytest --cov=caosdb -vv ./unittests)
diff --git a/setup.cfg b/setup.cfg
index 01be1d67..84815036 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -44,7 +44,7 @@ console_scripts =
   csv_to_datamodel = caoscrawler.scripts.generators:csv_to_datamodel_main
 
 [options.extras_require]
-h5_crawler =
+h5-crawler =
            h5py >= 3.8
            numpy
 spss =
diff --git a/src/doc/converters.rst b/src/doc/converters.rst
index ce172d0c..d7e11c23 100644
--- a/src/doc/converters.rst
+++ b/src/doc/converters.rst
@@ -260,13 +260,13 @@ HDF5 Converters
 
 For treating `HDF5 Files
 <https://docs.hdfgroup.org/hdf5/develop/_s_p_e_c.html>`_, there are in total
-four individual converters corresponding to the internal structure of HDF5 files:
-the :ref:`H5FileConverter` which opens the file itself and creates further
-structure elements from HDF5 groups, datasets, and included multi-dimensional
-arrays that are in turn treated by the :ref:`H5GroupConverter`, the
-:ref:`H5DatasetConverter`, and the :ref:`H5NdarrayConverter`, respectively. You
-need to install the LinkAhead crawler with its optional ``h5crawler`` dependency
-for using these converters.
+four individual converters corresponding to the internal structure of HDF5
+files: the :ref:`H5FileConverter`, which opens the file itself and creates
+further structure elements from HDF5 groups, datasets, and included
+multi-dimensional arrays, which are in turn treated by the
+:ref:`H5GroupConverter`, the :ref:`H5DatasetConverter`, and the
+:ref:`H5NdarrayConverter`, respectively. To use these converters, install
+the LinkAhead crawler with its optional ``h5-crawler`` dependency.
 
 The basic idea when crawling HDF5 files is to treat them very similar to
 :ref:`dictionaries <DictElement Converter>` in which the attributes on root,
-- 
GitLab
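
Usage note: with the extra renamed, installation is requested as shown below.
This is a minimal sketch; the local-checkout form is taken from the Dockerfile
and CI lines in this patch, while the ``caoscrawler`` distribution name used in
the second command is an assumption not stated in the patch itself.

    # from a local checkout of the repository (as in the Dockerfile above)
    pip3 install .[h5-crawler,spss]

    # assuming the published distribution is named caoscrawler
    pip3 install caoscrawler[h5-crawler]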