Commit 4c91ea57 authored by Henrik tom Woerden

Merge branch 'allow_grouped_identifiables' into 'master'

Restructure CFood and Crawler

See merge request bmp-caosdb/caosdb-advanced-user-tools!6
parents dedf5146 8efdc037
Showing 12 additions and 14 deletions
@@ -7,12 +7,13 @@ RUN git clone https://gitlab.gwdg.de/bmp-caosdb/caosdb-pylib.git && \
     cd caosdb-pylib && pip3 install .
 RUN git clone https://gitlab.gwdg.de/bmp-caosdb/caosdb-models.git && \
     cd caosdb-models && pip3 install .
-ADD https://gitlab.com/api/v4/projects/13601752/repository/branches \
-    scifolder_version.txt
-RUN git clone https://gitlab.com/henrik_indiscale/scifolder.git && \
-    cd scifolder && pip3 install .
+ADD https://gitlab.com/api/v4/projects/13601752/repository/branches/restructure_cfood scifolder_version.json
+RUN git clone -b restructure_cfood \
+    https://gitlab.com/henrik_indiscale/scifolder.git && \
+    cd scifolder && git checkout 403793fdfde511d53 && pip3 install .
 COPY . /git
-RUN rm -r /git/.git && mv /git/.docker/pycaosdb.ini /git/integrationtests
+RUN rm -r /git/.git \
+    && mv /git/.docker/pycaosdb.ini /git/integrationtests/full_test
 RUN cd /git && pip3 install .
-WORKDIR /git/integrationtests/
+WORKDIR /git/integrationtests/full_test
 CMD /wait-for-it.sh caosdb-server:10443 -t 120 -- ./test.sh
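
The ADD instruction above appears to act as a cache-busting step: Docker re-fetches the branch metadata on every build, so the following git clone is not served from a stale layer once restructure_cfood moves. A rough, illustrative sketch of what that endpoint returns, using only the Python standard library (not part of this repository; the printed commit id is whatever the branch points to at run time):

# Illustrative only; fetches the same GitLab API endpoint that the ADD
# instruction downloads. The returned JSON changes whenever the
# restructure_cfood branch gets new commits, which invalidates the build
# cache for the subsequent RUN step.
import json
from urllib.request import urlopen

url = ("https://gitlab.com/api/v4/projects/13601752"
       "/repository/branches/restructure_cfood")
with urlopen(url) as response:
    branch = json.load(response)

print(branch["commit"]["id"])  # commit the branch currently points to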
@@ -18,7 +18,7 @@ services:
         source: "$EXEPATH/.docker/cert"
         target: /opt/caosdb/cert
       - type: bind
-        source: "$EXEPATH/integrationtests/extroot"
+        source: "$EXEPATH/integrationtests/full_test/extroot"
         target: /opt/caosdb/mnt/extroot
         read_only: true
     ports:
@@ -4,7 +4,7 @@ test_server_side_scripting.bin_dir=../caosdb-server/test_scripting/bin/
 [Connection]
 url=https://caosdb-server:10443
 username=admin
-cacert=../.docker/cert/caosdb.cert.pem
+cacert=../../.docker/cert/caosdb.cert.pem
 #cacert=/etc/ssl/cert.pem
 debug=0
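
The extra ../ mirrors the move of the integration tests into integrationtests/full_test (see the WORKDIR change above). A small, self-contained sketch of how the relative cacert path resolves, assuming the path in pycaosdb.ini is interpreted relative to the directory the tests run from (an assumption for illustration, not something this commit states):

# Illustrative only; not part of this repository.
import configparser
from pathlib import Path

config = configparser.ConfigParser()
config.read("pycaosdb.ini")  # the file changed in the hunk above

# Assumed run directory after this commit (matches the new WORKDIR).
run_dir = Path("/git/integrationtests/full_test")
cacert = (run_dir / config["Connection"]["cacert"]).resolve()
print(cacert)  # -> /git/.docker/cert/caosdb.cert.pem, hence the extra "../"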
@@ -54,10 +54,7 @@ if __name__ == "__main__":
     files = Crawler.query_files(args.path)
     print("Query done...")
     config = db.configuration.get_config()
-    c = Crawler(food=[
-        AnalysisCFood(use_cache=True, access=access),
-        ExperimentCFood(use_cache=True, access=access),
-        PublicationCFood(use_cache=True, access=access),
-        SimulationCFood(use_cache=True, access=access),
-        ])
+    c = Crawler(use_cache=True, access=access,
+                food=[AnalysisCFood, ExperimentCFood,
+                      PublicationCFood, SimulationCFood, ])
     c.crawl(files)
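
The restructuring is visible at this call site: use_cache and access move from the individual CFoods to the Crawler, and food now receives CFood classes rather than pre-built instances. A minimal sketch of how such an interface could fit together; the class bodies, method names (match, create_identifiables, update_identifiables), and per-file instantiation are assumptions for illustration, not the confirmed caosdb-advanced-user-tools implementation:

# Hypothetical sketch only -- not the actual project code.
import re


class CFood:
    """Assumed base class; real CFoods create and update records."""

    _pattern = None  # subclasses define a path pattern

    def __init__(self, crawled_path, use_cache=False, access=lambda x: x):
        self.crawled_path = crawled_path
        self.use_cache = use_cache
        self.access = access

    @classmethod
    def match(cls, path):
        # Decide whether this CFood is responsible for the given path.
        return cls._pattern is not None and re.match(cls._pattern, path)

    def create_identifiables(self):
        raise NotImplementedError

    def update_identifiables(self):
        raise NotImplementedError


class Crawler:
    def __init__(self, food, use_cache=False, access=lambda x: x):
        self.food = food            # list of CFood *classes*, not instances
        self.use_cache = use_cache  # shared settings, handed on per file
        self.access = access

    def crawl(self, files):
        for path in files:
            for food_class in self.food:
                if food_class.match(path):
                    cfood = food_class(path, use_cache=self.use_cache,
                                       access=self.access)
                    cfood.create_identifiables()
                    cfood.update_identifiables()

Moving the shared options into the Crawler keeps the call site short and lets the crawler decide when and how each CFood is instantiated for a matched file.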