Commit f7628859 authored by Alexander Schlemmer

TST: added failing test

parent f6eec1b4
Merge requests: !53 Release 0.1, !45 Scalars in cfood definitions
Pipeline #28979 failed
# This is a test cfood for:
# https://gitlab.com/caosdb/caosdb-crawler/-/issues/9

Data:  # name of the converter
  type: Directory
  match: (.*)
  subtree:
    DataAnalysis:  # name of the converter
      type: Directory
      match: DataAnalysis
      records:
        RecordThatGetsParentsLater:
          someId: 23  # <- this scalar causes problems
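The someId: 23 entry above is a plain integer; when the crawler processes the records block, each value is passed through caoscrawler.converters.handle_value, which the test module below calls directly. As an illustration only, the following sketch shows the pass-through behaviour the new test expects for scalars; the name handle_value_sketch is hypothetical and this is not the library's implementation:

def handle_value_sketch(value, store):
    # Expected behaviour according to the asserts in test_handle_value:
    # the value is returned unchanged together with the collection mode
    # "single". The store argument only mirrors handle_value's signature
    # (it is needed for variable substitution of string values).
    if isinstance(value, str):
        # Plain strings already work according to the test.
        return (value, "single")
    # The failing cases from the test: non-string scalars (int, float,
    # bool) should behave exactly like plain strings.
    return (value, "single")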
#!/bin/python
# Tests for:
# https://gitlab.com/caosdb/caosdb-crawler/-/issues/9
# A. Schlemmer, 06/2021

from caoscrawler.crawl import Crawler, SecurityMode
from caoscrawler.structure_elements import File, DictTextElement, DictListElement
from caoscrawler.identifiable_adapters import IdentifiableAdapter, LocalStorageIdentifiableAdapter
from simulated_server_data import full_data
from functools import partial
from copy import deepcopy
from unittest.mock import patch
import caosdb.common.models as dbmodels
from unittest.mock import MagicMock, Mock
from os.path import join, dirname, basename
import yaml
import caosdb as db
from caosdb.apiutils import compare_entities
import pytest
from pytest import raises

# The main function that is affected by this issue:
from caoscrawler.converters import handle_value
# We need the store for the above function:
from caoscrawler.stores import GeneralStore
def rfp(*pathcomponents):
    """
    Return full path.
    Shorthand convenience function.
    """
    return join(dirname(__file__), *pathcomponents)


def dircheckstr(*pathcomponents):
    """
    Return the debug tree identifier for a given path.
    """
    return ("caoscrawler.structure_elements.Directory: " + basename(
        join(*pathcomponents)) + ", " + rfp(
            "test_directories", "examples_article", *pathcomponents))


@pytest.fixture
def crawler():
    crawler = Crawler(debug=True)
    crawler.crawl_directory(rfp("test_directories", "examples_article"),
                            rfp("cfoods_scalar.yml"))
    return crawler
def test_handle_value():
    # Note: the store is only needed here if we also want to test variable substitution:
    store = GeneralStore()
    # This one should work:
    assert handle_value("bla", store) == ("bla", "single")
    # These currently fail:
    assert handle_value(4, store) == (4, "single")
    assert handle_value(4.2, store) == (4.2, "single")
    assert handle_value(True, store) == (True, "single")
    # List test:
    assert handle_value([4, 3, 2], store) == ([4, 3, 2], "single")
def test_record_structure_generation(crawler):
    pass
    # Copied from test_tool.py as an example:
    # subd = crawler.debug_tree[dircheckstr("DataAnalysis")]
    # subc = crawler.debug_metadata["copied"][dircheckstr("DataAnalysis")]
    # assert len(subd) == 2
    # # variables store on Data Analysis node of debug tree
    # assert len(subd[0]) == 2
    # # record store on Data Analysis node of debug tree
    # assert len(subd[1]) == 0
    # assert len(subc) == 2
    # assert len(subc[0]) == 2
    # assert len(subc[1]) == 0
    # # The data analysis node creates one variable for the node itself:
    # assert subd[0]["DataAnalysis"] == "examples_article/DataAnalysis"
    # assert subc[0]["DataAnalysis"] is False
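A short usage note: with this commit applied, the failure can be reproduced by selecting the new test by name via pytest's standard programmatic entry point. The assumption that this is run from the directory containing the test module is mine, not stated in the commit:

import pytest

# Run only the test exercising handle_value; while the scalar issue is
# unfixed, the int/float/bool assertions are expected to fail (matching
# the failed pipeline referenced above).
pytest.main(["-k", "test_handle_value", "-v"])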