Commit ce7807a9 authored by Henrik tom Wörden

FIX: fix integration test and rename test file such that pytest actually executes it

parent 8e6b4fba
2 merge requests: !53 Release 0.1, !34 F insert auth
Pipeline #28062 failed
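
The rename mentioned in the commit message matters because pytest, by default, only collects test files whose names match its python_files patterns, i.e. "test_*.py" or "*_test.py"; a module named any other way is silently skipped during collection. A minimal sketch of that rule, using placeholder file names rather than the actual ones from this commit:

# Illustration only: pytest's default file-collection patterns.
from fnmatch import fnmatch

DEFAULT_PATTERNS = ["test_*.py", "*_test.py"]

def would_be_collected(filename: str) -> bool:
    # True if pytest's default python_files globs match the file name.
    return any(fnmatch(filename, pattern) for pattern in DEFAULT_PATTERNS)

assert would_be_collected("test_crawler.py")          # collected
assert not would_be_collected("integrationtest.py")   # silently ignored
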
@@ -42,8 +42,8 @@ from caosadvancedtools.models.parser import parse_model_from_yaml
 import yaml
 # TODO is not yet merged in caosadvancedtools
-from caosadvancedtools.testutils import clear_database, set_test_key
-set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
+#from caosadvancedtools.testutils import clear_database, set_test_key
+# set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
 def rfp(*pathcomponents):
@@ -54,6 +54,11 @@ def rfp(*pathcomponents):
     return os.path.join(os.path.dirname(__file__), *pathcomponents)
+@pytest.fixture
+def clear_database():
+    db.execute_query("FIND Entity").delete()
 @pytest.fixture
 def usemodel():
     model = parse_model_from_yaml(rfp("model.yml"))
@@ -87,8 +92,8 @@ def ident():
 def crawl_standard_test_directory(cr: Crawler,
                                   subdir: str = "examples_article",
                                   cfood: str = "scifolder_cfood.yml"):
-    cr.crawl_directory(rfp("..", "unittests", "test_directories", subdir),
-                       rfp("..", "unittests", cfood))
+    cr.crawl_directory(rfp("..", "..", "unittests", "test_directories", subdir),
+                       rfp("..", "..", "unittests", cfood))
 @pytest.fixture
@@ -105,12 +110,11 @@ def crawler_extended(ident):
     # correct paths for current working directory
     file_list = [r for r in cr.target_data if r.role == "File"]
     for f in file_list:
-        f.file = rfp("..", "unittests", "test_directories",
-                     "examples_article", f.file)
+        f.file = rfp("..", "..", "unittests", "test_directories", f.file)
     return cr
-def test_single_insertion(clear_database, usemodel, crawler):
+def test_single_insertion(clear_database, usemodel, crawler, ident):
     ins, ups = crawler.synchronize()
     # This test also generates the file records.xml used in some of the unittests:
@@ -118,7 +122,7 @@ def test_single_insertion(clear_database, usemodel, crawler):
     for i in reversed(range(len(res))):
         if res[i].parents[0].name == "PyTestInfo":
             del res[i]
-    filename = rfp("..", "unittests", "records.xml")
+    filename = rfp("..", "..", "unittests", "records.xml")
     with open(filename, "w") as f:
         xml = res.to_xml()
         # Remove noscript and transaction benchmark:
@@ -131,10 +135,9 @@ def test_single_insertion(clear_database, usemodel, crawler):
     assert len(ups) == 0
     # Do a second run on the same data, there should be no changes:
-    crawler = Crawler(debug=True, identifiableAdapter=ident_adapt)
-    crawler.copy_attributes = Mock()
-    crawler.crawl_directory(rfp("../unittests/test_directories", "examples_article"),
-                            rfp("../unittests/scifolder_cfood.yml"))
+    crawler = Crawler(debug=True, identifiableAdapter=ident)
+    crawler.crawl_directory(rfp("../../unittests/test_directories", "examples_article"),
+                            rfp("../../unittests/scifolder_cfood.yml"))
     ins, ups = crawler.synchronize()
     assert len(ins) == 0
     assert len(ups) == 0
@@ -271,7 +274,7 @@ def test_file_insertion_dry(clear_database, usemodel, ident):
     for f in file_list:
         assert f.path.endswith("README.md")
-        assert f.path == f.file
+        assert f.path[1:] == f.file
     ins, ups = crawler_extended.synchronize(commit_changes=False)
     assert len(ups) == 0
@@ -304,8 +307,7 @@ def test_file_update(clear_database, usemodel, ident, crawler_extended):
     file_list = [r for r in cr.target_data if r.role == "File"]
     for f in file_list:
-        f.file = rfp("..", "unittests", "test_directories",
-                     "examples_article", f.file)
+        f.file = rfp("..", "..", "unittests", "test_directories", f.file)
     ins2, ups2 = cr.synchronize(commit_changes=True)
     assert len(ups1) == 0
     assert len(ups2) == 0
@@ -320,8 +322,7 @@ def test_file_update(clear_database, usemodel, ident, crawler_extended):
     file_list = [r for r in cr2.target_data if r.role == "File"]
     for f in file_list:
-        f.file = rfp("..", "unittests", "test_directories",
-                     "examples_article", f.file)
+        f.file = rfp("..", "..", "unittests", "test_directories", f.file)
     ins3, ups3 = cr2.synchronize(commit_changes=True)
     assert len(ups3) == 11
......
-from .crawl import Crawler
+from .crawl import Crawler, SecurityMode
@@ -6,6 +6,10 @@ Definitions:
   type: Definitions
   #include "description.yml"
+Data: # name of the converter
+  type: Directory
+  match: (.*)
+  subtree:
     DataAnalysis: # name of the converter
       type: Directory
       match: DataAnalysis
......
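
Side note on the clear_database fixture added in the diff above: as written it only empties the database before a test runs. Below is a hedged sketch, not part of this commit, of a yield-based variant that also cleans up afterwards, assuming the same caosdb client API already used in the test file:

# Sketch only, not part of the commit: a clear_database fixture with teardown.
import caosdb as db
import pytest

@pytest.fixture
def clear_database():
    def _wipe():
        # Delete everything currently in the database; the guard avoids
        # calling delete() on an empty container.
        entities = db.execute_query("FIND Entity")
        if entities:
            entities.delete()

    _wipe()   # clean state before the test
    yield     # the test runs here
    _wipe()   # clean up afterwards as well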