diff --git a/README_SETUP.md b/README_SETUP.md
index 0ac69cf928ae0267e60a8a5ec576b5117236e24c..e5ebd969462f7d2c28a329e2c6b6e1bab1252775 100644
--- a/README_SETUP.md
+++ b/README_SETUP.md
@@ -39,9 +39,11 @@ Optional h5-crawler:
    extroot. E.g. `sudo mount -o bind extroot ../../caosdb-deploy/profiles/empty/paths/extroot` (or
    whatever path the extroot of the empty profile to be used is located at).
-3. Start an empty (!) CaosDB instance (with the mounted extroot). The
-   database will be cleared during testing, so it's important to use
+3. Start (or restart) an empty (!) CaosDB instance (with the mounted extroot).
+   The database will be cleared during testing, so it's important to use
    an empty instance.
+   Make sure your configuration for the python caosdb module is correct and
+   allows to connect to the server.
 4. Run `test.sh`. Note that this may modify content of the
    `integrationtest/extroot/` directory.
 
 ## Code Formatting
diff --git a/integrationtests/test.sh b/integrationtests/test.sh
index 5efd549551670d3a4581380271ac2aba4b80a10f..71af543643a35cb082f10a24440c5ea87df946c9 100755
--- a/integrationtests/test.sh
+++ b/integrationtests/test.sh
@@ -34,11 +34,11 @@ echo "Filling the database"
 echo "Testing the crawler database"
 python3 -m pytest test_crawler_with_cfoods.py
 echo "make a change"
-pushd extroot
+cd extroot
 egrep -liRZ 'A description of another example' . | xargs -0 -l sed -i -e 's/A description of another example/A description of this example/g'
 # remove a file to check that this does not lead to a crawler crash
 mv DataAnalysis/2010_TestProject/2019-02-03_something/README.xlsx DataAnalysis/2010_TestProject/2019-02-03_something/README.xlsx_back
-popd
+cd ..
 echo "run crawler"
 ./crawl.py / | tee $OUT
 # rename the moved file
@@ -58,9 +58,9 @@ then
 fi
 set -e
 echo "undo changes"
-pushd extroot
+cd extroot
 egrep -liRZ 'A description of this example' . | xargs -0 -l sed -i -e 's/A description of this example/A description of another example/g'
-popd
+cd ..
 python3 test_table.py
 # TODO the following test deletes lots of the data inserted by the crawler
 echo "Testing im and export"