Commit 2cc517ee authored by Henrik tom Wörden

move

parent a1d7d7e2
2 merge requests: !53 Release 0.1, !18 Add integrationtests based on a real world example
Showing changes with 42 additions and 14 deletions
1. Clear the database (see clear_database.py)
2. Insert the data model (see insert_model.py)
3. Run test.py (a minimal sketch of this sequence follows the prerequisites below)

Prerequisites:
1. Mount test_data/extroot as the extroot folder in the CaosDB server
2. Use an empty server
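The same manual sequence can be scripted; a minimal sketch in Python, assuming the three scripts listed above sit in the working directory and a CaosDB test server is already running with the extroot mount in place:

import subprocess
import sys

# Run the manual steps in order: clear the server, insert the data model,
# then execute the integration test. check=True stops at the first failure.
for script in ("clear_database.py", "insert_model.py", "test.py"):
    subprocess.run([sys.executable, script], check=True)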
@@ -34,10 +34,44 @@ from newcrawler.converters import JSONFileConverter, DictConverter
 from newcrawler.identifiable_adapters import CaosDBIdentifiableAdapter
 from newcrawler.structure_elements import File, JSONFile, Directory
 from caosadvancedtools.testutils import clear_database, set_test_key
 import sys

-def test_dataset():
-    crawler_definition_path = "./dataset_cfoods.yml"
-    #json_file_path = rfp("test_directories", "single_file_test_data", "testjson.json")
+from caosadvancedtools.models.parser import parse_model_from_json_schema, parse_model_from_yaml
+
+set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
+
+
+def rfp(*pathcomponents):
+    """
+    Return full path.
+    Shorthand convenience function.
+    """
+    return os.path.join(os.path.dirname(__file__), *pathcomponents)
+
+
+DATADIR = rfp("..", "test_data", "extroot", "realworld_example")
+
+
+@pytest.fixture
+def usemodel():
+    # First load dataspace data model
+    dataspace_definitions = parse_model_from_json_schema(
+        os.path.join(DATADIR, "schema", "dataspace.schema.json"))
+    dataspace_definitions.sync_data_model(noquestion=True)
+
+    # Then general dataset definitions
+    dataset_definitions = parse_model_from_json_schema(
+        os.path.join(DATADIR, "schema", "dataset.schema.json"))
+    dataset_definitions.sync_data_model(noquestion=True)
+
+    # Finally, add inheritances as defined in yaml
+    dataset_inherits = parse_model_from_yaml(
+        os.path.join(DATADIR, "schema", "dataset-inheritance.yml"))
+    dataset_inherits.sync_data_model(noquestion=True)
+
+
+def test_dataset(clear_database, usemodel):
+    # json_file_path = rfp("test_directories", "single_file_test_data", "testjson.json")
     ident = CaosDBIdentifiableAdapter()
     ident.register_identifiable(
@@ -48,15 +82,14 @@ def test_dataset():
     ).add_parent("Person").add_property("full_name"))

     crawler = Crawler(debug=True, identifiableAdapter=ident)
-    crawler_definition = crawler.load_definition(crawler_definition_path)
-    #print(json.dumps(crawler_definition, indent=3))
+    crawler_definition = crawler.load_definition(os.path.join(DATADIR, "dataset_cfoods.yml"))
+    # print(json.dumps(crawler_definition, indent=3))
     # Load and register converter packages:
     converter_registry = crawler.load_converters(crawler_definition)
     # print("DictIntegerElement" in converter_registry)

     records = crawler.start_crawling(
-        Directory('data',
-                  "data"),
+        Directory(os.path.join(DATADIR, 'data'), "data"),
         crawler_definition,
         converter_registry
     )
@@ -67,7 +100,3 @@ def test_dataset():
     # print(subc)
     # print(records)
     ins, ups = crawler.synchronize()
-
-
-if __name__ == "__main__":
-    test_dataset()
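The ident.register_identifiable(...) call above is truncated by the hunk boundary. For orientation only, a minimal sketch of the registration pattern, assuming register_identifiable takes a name plus a RecordType template; the registered name and property below are illustrative assumptions, not taken from this diff:

import caosdb as db
from newcrawler.identifiable_adapters import CaosDBIdentifiableAdapter

ident = CaosDBIdentifiableAdapter()
# Hypothetical registration: two "Person" records count as the same entity
# when their full_name property matches.
ident.register_identifiable(
    "Person",
    db.RecordType().add_parent("Person").add_property("full_name"))

Registered templates like this are what the crawler consults during synchronize() to decide whether a crawled record becomes an insert or an update of an existing record.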