Skip to content
Snippets Groups Projects
Commit 1f99de9e authored by Henrik tom Wörden's avatar Henrik tom Wörden
Browse files

TST: fix test

parent 2cc517ee
Branches
Tags
2 merge requests: !53 Release 0.1, !18 Add integration tests based on a real-world example
......@@ -40,6 +40,7 @@ import pytest
from caosadvancedtools.models.parser import parse_model_from_yaml
import yaml
# TODO is not yet merged in caosadvancedtools
from caosadvancedtools.testutils import clear_database, set_test_key
set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
......
......@@ -33,12 +33,14 @@ from newcrawler.crawl import Crawler
from newcrawler.converters import JSONFileConverter, DictConverter
from newcrawler.identifiable_adapters import CaosDBIdentifiableAdapter
from newcrawler.structure_elements import File, JSONFile, Directory
import pytest
from caosadvancedtools.models.parser import parse_model_from_json_schema, parse_model_from_yaml
from caosadvancedtools.testutils import clear_database, set_test_key
#from caosadvancedtools.testutils import clear_database, set_test_key
import sys
from caosadvancedtools.models.parser import parse_model_from_json_schema, parse_model_from_yaml
set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
# TODO is not yet merged in caosadvancedtools
# set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
def rfp(*pathcomponents):
......@@ -70,7 +72,9 @@ def usemodel():
dataset_inherits.sync_data_model(noquestion=True)
def test_dataset(clear_database, usemodel):
def test_dataset(
# clear_database,
usemodel):
# json_file_path = rfp("test_directories", "single_file_test_data", "testjson.json")
ident = CaosDBIdentifiableAdapter()
......@@ -89,7 +93,7 @@ def test_dataset(clear_database, usemodel):
# print("DictIntegerElement" in converter_registry)
records = crawler.start_crawling(
Directory(os.path.join(DATADIR, 'data'), "data"),
Directory("data", os.path.join(DATADIR, 'data')),
crawler_definition,
converter_registry
)
......@@ -100,3 +104,19 @@ def test_dataset(clear_database, usemodel):
# print(subc)
# print(records)
ins, ups = crawler.synchronize()
dataspace = db.execute_query("FIND RECORD Dataspace WITH name=35 AND dataspace_id=20002 AND "
"archived=FALSE AND url='https://datacloud.de/index.php/f/7679'"
" AND Person", unique=True)
assert dataspace.get_property("start_date").value == "2022-03-01"
db.execute_query("FIND RECORD Person with full_name='Max Schmitt' AND"
" given_name='Max'", unique=True)
dataset = db.execute_query(f"FIND RECORD Dataset with Dataspace={dataspace.id} AND title="
"'Random numbers created on a random autumn day in a random office'"
"", unique=True)
assert db.execute_query(f"COUNT RECORD with id={dataset.id} AND WHICH REFERENCES Person WITH full_name="
"'Alexa Nozone' AND WHICH REFERENCES Person WITH full_name='Max Schmitt'"
"") == 1
assert db.execute_query(f"COUNT RECORD with id={dataset.id} AND WHICH REFERENCES Event WITH "
"start_datetime='2022-02-10T16:36:48+01:00'") == 1
......@@ -4,8 +4,8 @@
"archived": false,
"coordinator": {
"full_name": "Max Schmitt",
"firstname": "Max",
"lastname": "Schmitt",
"given_name": "Max",
"family_name": "Schmitt",
"email": "max.schmitt@email.de"
},
"start_date": "2022-03-01",
......
......@@ -4,8 +4,7 @@
"Event": [
{
"longitude": 18.445078548041533,
"datetime": "2022-02-10T16:36:48+01:00",
"start_date": "2021-10-01",
"start_datetime": "2022-02-10T16:36:48+01:00",
"latitude": 53.10833068997861,
"elevation": 2,
"location": "Bremen, Germany"
......
......@@ -142,7 +142,7 @@
"required": [
"longitude",
"latitude",
"start_date"
"start_datetime"
]
}
},
......
0% Loading or failed to load.
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment