Skip to content
Snippets Groups Projects
Commit bcc7888b authored by florian's avatar florian
Browse files

WIP: Add identifiable file and todos in code

parent f9025c8d
No related branches found
No related tags found
2 merge requests!53Release 0.1,!20F identifiable test
Pipeline #22873 failed
...@@ -29,7 +29,7 @@ import os ...@@ -29,7 +29,7 @@ import os
import caosdb as db import caosdb as db
from newcrawler.crawl import Crawler from newcrawler.crawl import Crawler, crawler_main
from newcrawler.converters import JSONFileConverter, DictConverter from newcrawler.converters import JSONFileConverter, DictConverter
from newcrawler.identifiable_adapters import CaosDBIdentifiableAdapter from newcrawler.identifiable_adapters import CaosDBIdentifiableAdapter
from newcrawler.structure_elements import File, JSONFile, Directory from newcrawler.structure_elements import File, JSONFile, Directory
...@@ -75,7 +75,6 @@ def usemodel(): ...@@ -75,7 +75,6 @@ def usemodel():
def test_dataset( def test_dataset(
# clear_database, # clear_database,
usemodel): usemodel):
# json_file_path = rfp("test_directories", "single_file_test_data", "testjson.json")
ident = CaosDBIdentifiableAdapter() ident = CaosDBIdentifiableAdapter()
ident.register_identifiable( ident.register_identifiable(
...@@ -86,7 +85,8 @@ def test_dataset( ...@@ -86,7 +85,8 @@ def test_dataset(
).add_parent("Person").add_property("full_name")) ).add_parent("Person").add_property("full_name"))
crawler = Crawler(debug=True, identifiableAdapter=ident) crawler = Crawler(debug=True, identifiableAdapter=ident)
crawler_definition = crawler.load_definition(os.path.join(DATADIR, "dataset_cfoods.yml")) crawler_definition = crawler.load_definition(
os.path.join(DATADIR, "dataset_cfoods.yml"))
# print(json.dumps(crawler_definition, indent=3)) # print(json.dumps(crawler_definition, indent=3))
# Load and register converter packages: # Load and register converter packages:
converter_registry = crawler.load_converters(crawler_definition) converter_registry = crawler.load_converters(crawler_definition)
...@@ -120,3 +120,15 @@ def test_dataset( ...@@ -120,3 +120,15 @@ def test_dataset(
"") == 1 "") == 1
assert db.execute_query(f"COUNT RECORD with id={dataset.id} AND WHICH REFERENCES Event WITH " assert db.execute_query(f"COUNT RECORD with id={dataset.id} AND WHICH REFERENCES Event WITH "
"start_datetime='2022-02-10T16:36:48+01:00'") == 1 "start_datetime='2022-02-10T16:36:48+01:00'") == 1
def test_event_update(usemodel):
    """WIP integration test: verify that Event records get updated on re-crawl.

    Loads the identifiable definitions from the YAML registry and runs the
    crawler entry point. The actual update check is still to be written
    (see TODOs below).
    """
    identifiables_path = os.path.join(DATADIR, "identifiables.yml")
    ident = CaosDBIdentifiableAdapter()
    ident.load_from_yaml_definition(identifiables_path)
    # TODO(fspreck): Use crawler_main
    crawler_main()
    # TODO(fspreck): crawl again manually, edit the event records in the update
    # list, synchronize, and test whether the events have been updated.
# Identifiable registry: maps each record type to the list of property
# names whose values uniquely identify a record of that type.
# Entries that are commented out below are candidate identifying
# properties that are not (yet) part of the identifiable.
license:
- name
project_type:
- name
Keyword:
- name
Taxon:
- name
Person:
- email
# - full_name
Dataset:
- title
# - DOI
Event:
- longitude
- latitude
- start_datetime
Dataspace:
- dataspace_id
Project:
- name
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment