diff --git a/.docker/Dockerfile b/.docker/Dockerfile
index 166f870348910c77d08124b4e9ee15ba0ac22098..2a5dc8f54d40b12592781364c33f5c53e25aa776 100644
--- a/.docker/Dockerfile
+++ b/.docker/Dockerfile
@@ -34,7 +34,7 @@ RUN rm -r /git/.git
 # Install pycaosdb.ini for the tests
 RUN mv /git/.docker/tester_pycaosdb.ini /git/integrationtests/pycaosdb.ini
 
-RUN cd /git/ && pip3 install .[h5_crawler,spss]
+RUN cd /git/ && pip3 install .[h5-crawler,spss]
 
 WORKDIR /git/integrationtests
 # wait for server,
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index c40afd2874dc89ed473cc34d65d90ecf75f33f82..3b6dca8f47847de85190bf709a8717e8efebe7e6 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -131,7 +131,7 @@ unittest_py3.8:
     # TODO: Use f-branch logic here
     - pip install git+https://gitlab.indiscale.com/caosdb/src/caosdb-pylib.git@dev
     - pip install git+https://gitlab.indiscale.com/caosdb/src/caosdb-advanced-user-tools.git@dev
-    - pip install .[h5_crawler,spss]
+    - pip install .[h5-crawler,spss]
     # actual test
     - caosdb-crawler --help
     - pytest --cov=caosdb -vv ./unittests
@@ -168,7 +168,7 @@ unittest_py3.13:
     # TODO: Use f-branch logic here
     - pip install git+https://gitlab.indiscale.com/caosdb/src/caosdb-pylib.git@dev
     - (! pip install git+https://gitlab.indiscale.com/caosdb/src/caosdb-advanced-user-tools.git@dev)
-    - (! pip install .[h5_crawler,spss])
+    - (! pip install .[h5-crawler,spss])
     # actual test
     - (! caosdb-crawler --help)
     - (! pytest --cov=caosdb -vv ./unittests)
diff --git a/setup.cfg b/setup.cfg
index 01be1d67fbf744190bba4a7bb0e057f52ec1e3e6..848150363c42776993029c54e777f4ff6ccf72ea 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -44,7 +44,7 @@ console_scripts =
     csv_to_datamodel = caoscrawler.scripts.generators:csv_to_datamodel_main
 
 [options.extras_require]
-h5_crawler =
+h5-crawler =
     h5py >= 3.8
     numpy
 spss =
diff --git a/src/doc/converters.rst b/src/doc/converters.rst
index ce172d0c0ce0411516a6ffa75538bb42d1bb104b..d7e11c235fafa1e42f53342a24255ceb0d275ed4 100644
--- a/src/doc/converters.rst
+++ b/src/doc/converters.rst
@@ -260,13 +260,13 @@ HDF5 Converters
 
 For treating `HDF5 Files
 <https://docs.hdfgroup.org/hdf5/develop/_s_p_e_c.html>`_, there are in total
-four individual converters corresponding to the internal structure of HDF5 files:
-the :ref:`H5FileConverter` which opens the file itself and creates further
-structure elements from HDF5 groups, datasets, and included multi-dimensional
-arrays that are in turn treated by the :ref:`H5GroupConverter`, the
-:ref:`H5DatasetConverter`, and the :ref:`H5NdarrayConverter`, respectively. You
-need to install the LinkAhead crawler with its optional ``h5crawler`` dependency
-for using these converters.
+four individual converters corresponding to the internal structure of HDF5
+files: the :ref:`H5FileConverter` which opens the file itself and creates
+further structure elements from HDF5 groups, datasets, and included
+multi-dimensional arrays that are in turn treated by the
+:ref:`H5GroupConverter`, the :ref:`H5DatasetConverter`, and the
+:ref:`H5NdarrayConverter`, respectively. You need to install the LinkAhead
+crawler with its optional ``h5-crawler`` dependency for using these converters.
 
 The basic idea when crawling HDF5 files is to treat them very similar to
 :ref:`dictionaries <DictElement Converter>` in which the attributes on root,