Skip to content
Snippets Groups Projects
Commit 942d3e9b authored by Daniel Hornung's avatar Daniel Hornung
Browse files

Merge branch 'f-more-jsonschema-export' into 'dev'

F more jsonschema export

See merge request !84
parents fbebdfd8 ce21eb96
Branches
Tags
2 merge requests!89ENH: JsonSchemaExporter accepts do_not_create parameter.,!84F more jsonschema export
Pipeline #43566 passed
...@@ -36,10 +36,13 @@ class JsonSchemaExporter: ...@@ -36,10 +36,13 @@ class JsonSchemaExporter:
""" """
def __init__(self, additional_properties: bool = True, def __init__(self, additional_properties: bool = True,
name_and_description_in_properties: bool = False, name_property_for_new_records: bool = False,
description_property_for_new_records: bool = False,
additional_options_for_text_props: dict = None, additional_options_for_text_props: dict = None,
units_in_description: bool = True, units_in_description: bool = True,
do_not_create: List[str] = None, do_not_create: List[str] = None,
do_not_retrieve: List[str] = None,
no_remote: bool = False,
): ):
"""Set up a JsonSchemaExporter, which can then be applied on RecordTypes. """Set up a JsonSchemaExporter, which can then be applied on RecordTypes.
...@@ -48,9 +51,12 @@ class JsonSchemaExporter: ...@@ -48,9 +51,12 @@ class JsonSchemaExporter:
additional_properties : bool, optional additional_properties : bool, optional
Whether additional properties will be admitted in the resulting Whether additional properties will be admitted in the resulting
schema. Optional, default is True. schema. Optional, default is True.
name_and_description_in_properties : bool, optional name_property_for_new_records : bool, optional
Whether objects that are generated from reference properties shall have a `name` and Whether objects shall generally have a `name` property in the generated schema.
`description` property in the generated schema. Optional, default is False. Optional, default is False.
description_property_for_new_records : bool, optional
Whether objects shall generally have a `description` property in the generated schema.
Optional, default is False.
additional_options_for_text_props : dict, optional additional_options_for_text_props : dict, optional
Dictionary containing additional "pattern" or "format" options for Dictionary containing additional "pattern" or "format" options for
string-typed properties. Optional, default is empty. string-typed properties. Optional, default is empty.
...@@ -63,17 +69,28 @@ class JsonSchemaExporter: ...@@ -63,17 +69,28 @@ class JsonSchemaExporter:
A list of RecordType names, for which there should be no option A list of RecordType names, for which there should be no option
to create them. Instead, only the choice of existing elements should to create them. Instead, only the choice of existing elements should
be given. be given.
do_not_retrieve : list[str]
A list of RecordType names, for which no Records shall be retrieved. Instead, only an
object description should be given. If this list overlaps with the `do_not_create`
parameter, the behavior is undefined.
no_remote : bool
If True, do not attempt to connect to a LinkAhead server at all. Default is False.
""" """
if not additional_options_for_text_props: if not additional_options_for_text_props:
additional_options_for_text_props = {} additional_options_for_text_props = {}
if not do_not_create: if not do_not_create:
do_not_create = [] do_not_create = []
if not do_not_retrieve:
do_not_retrieve = []
self._additional_properties = additional_properties self._additional_properties = additional_properties
self._name_and_description_in_properties = name_and_description_in_properties self._name_property_for_new_records = name_property_for_new_records
self._description_property_for_new_records = description_property_for_new_records
self._additional_options_for_text_props = additional_options_for_text_props self._additional_options_for_text_props = additional_options_for_text_props
self._units_in_description = units_in_description self._units_in_description = units_in_description
self._do_not_create = do_not_create self._do_not_create = do_not_create
self._do_not_retrieve = do_not_retrieve
self._no_remote = no_remote
@staticmethod @staticmethod
def _make_required_list(rt: db.RecordType): def _make_required_list(rt: db.RecordType):
...@@ -151,14 +168,21 @@ class JsonSchemaExporter: ...@@ -151,14 +168,21 @@ class JsonSchemaExporter:
prop_name = prop.datatype prop_name = prop.datatype
if isinstance(prop.datatype, db.Entity): if isinstance(prop.datatype, db.Entity):
prop_name = prop.datatype.name prop_name = prop.datatype.name
if prop_name in self._do_not_retrieve:
values = []
else:
values = self._retrieve_enum_values(f"RECORD '{prop_name}'") values = self._retrieve_enum_values(f"RECORD '{prop_name}'")
if prop_name in self._do_not_create: if prop_name in self._do_not_create:
# Only a simple list of values # Only a simple list of values
json_prop["enum"] = values json_prop["enum"] = values
else:
if self._no_remote:
rt = prop.datatype
else: else:
rt = db.execute_query(f"FIND RECORDTYPE WITH name='{prop_name}'", rt = db.execute_query(f"FIND RECORDTYPE WITH name='{prop_name}'",
unique=True) unique=True)
subschema = self._make_segment_from_recordtype(rt) subschema = self._make_segment_from_recordtype(rt)
if values:
subschema["title"] = "Create new" subschema["title"] = "Create new"
json_prop["oneOf"] = [ json_prop["oneOf"] = [
{ {
...@@ -167,6 +191,8 @@ class JsonSchemaExporter: ...@@ -167,6 +191,8 @@ class JsonSchemaExporter:
}, },
subschema subschema
] ]
else:
json_prop = subschema
else: else:
raise ValueError( raise ValueError(
...@@ -203,8 +229,10 @@ class JsonSchemaExporter: ...@@ -203,8 +229,10 @@ class JsonSchemaExporter:
return prop return prop
@staticmethod def _retrieve_enum_values(self, role: str):
def _retrieve_enum_values(role: str):
if self._no_remote:
return []
possible_values = db.execute_query(f"SELECT name, id FROM {role}") possible_values = db.execute_query(f"SELECT name, id FROM {role}")
...@@ -228,8 +256,9 @@ class JsonSchemaExporter: ...@@ -228,8 +256,9 @@ class JsonSchemaExporter:
schema["additionalProperties"] = self._additional_properties schema["additionalProperties"] = self._additional_properties
props = OrderedDict() props = OrderedDict()
if self._name_and_description_in_properties: if self._name_property_for_new_records:
props["name"] = self._make_text_property("The name of the Record to be created") props["name"] = self._make_text_property("The name of the Record to be created")
if self._description_property_for_new_records:
props["description"] = self._make_text_property( props["description"] = self._make_text_property(
"The description of the Record to be created") "The description of the Record to be created")
...@@ -274,10 +303,14 @@ class JsonSchemaExporter: ...@@ -274,10 +303,14 @@ class JsonSchemaExporter:
def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = True, def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = True,
name_and_description_in_properties: bool = False, name_property_for_new_records: bool = False,
description_property_for_new_records: bool = False,
additional_options_for_text_props: Optional[dict] = None, additional_options_for_text_props: Optional[dict] = None,
units_in_description: bool = True, units_in_description: bool = True,
do_not_create: List[str] = None): do_not_create: List[str] = None,
do_not_retrieve: List[str] = None,
no_remote: bool = False,
):
"""Create a jsonschema from a given RecordType that can be used, e.g., to """Create a jsonschema from a given RecordType that can be used, e.g., to
validate a json specifying a record of the given type. validate a json specifying a record of the given type.
...@@ -291,9 +324,12 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T ...@@ -291,9 +324,12 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T
additional_properties : bool, optional additional_properties : bool, optional
Whether additional properties will be admitted in the resulting Whether additional properties will be admitted in the resulting
schema. Optional, default is True. schema. Optional, default is True.
name_and_description_in_properties : bool, optional name_property_for_new_records : bool, optional
Whether objects that are generated from reference properties shall have a `name` and Whether objects shall generally have a `name` property in the generated schema. Optional,
`description` property in the generated schema. Optional, default is False. default is False.
description_property_for_new_records : bool, optional
Whether objects shall generally have a `description` property in the generated schema.
Optional, default is False.
additional_options_for_text_props : dict, optional additional_options_for_text_props : dict, optional
Dictionary containing additional "pattern" or "format" options for Dictionary containing additional "pattern" or "format" options for
string-typed properties. Optional, default is empty. string-typed properties. Optional, default is empty.
...@@ -306,6 +342,12 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T ...@@ -306,6 +342,12 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T
A list of RecordType names, for which there should be no option A list of RecordType names, for which there should be no option
to create them. Instead, only the choice of existing elements should to create them. Instead, only the choice of existing elements should
be given. be given.
do_not_retrieve : list[str]
A list of RecordType names, for which no Records shall be retrieved. Instead, only an
object description should be given. If this list overlaps with the `do_not_create`
parameter, the behavior is undefined.
no_remote : bool
If True, do not attempt to connect to a LinkAhead server at all. Default is False.
Returns Returns
------- -------
...@@ -316,10 +358,13 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T ...@@ -316,10 +358,13 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T
exporter = JsonSchemaExporter( exporter = JsonSchemaExporter(
additional_properties=additional_properties, additional_properties=additional_properties,
name_and_description_in_properties=name_and_description_in_properties, name_property_for_new_records=name_property_for_new_records,
description_property_for_new_records=description_property_for_new_records,
additional_options_for_text_props=additional_options_for_text_props, additional_options_for_text_props=additional_options_for_text_props,
units_in_description=units_in_description, units_in_description=units_in_description,
do_not_create=do_not_create, do_not_create=do_not_create,
do_not_retrieve=do_not_retrieve,
no_remote=no_remote,
) )
return exporter.recordtype_to_json_schema(rt) return exporter.recordtype_to_json_schema(rt)
...@@ -408,7 +453,7 @@ out : dict ...@@ -408,7 +453,7 @@ out : dict
"properties": sub_schemas, "properties": sub_schemas,
"required": required, "required": required,
"additionalProperties": False, "additionalProperties": False,
"$schema": "https://json-schema.org/draft/2019-09/schema", "$schema": "https://json-schema.org/draft/2020-12/schema",
} }
return result return result
...@@ -175,8 +175,8 @@ def loadpath(path, include, exclude, prefix, dryrun, forceAllowSymlinks, caosdbi ...@@ -175,8 +175,8 @@ def loadpath(path, include, exclude, prefix, dryrun, forceAllowSymlinks, caosdbi
for f in files: for f in files:
totalsize += f.size totalsize += f.size
logger.info("Made in total {} new files with a combined size of {} " logger.info(
"accessible.".format(len(files), convert_size(totalsize))) f"Made new files accessible: {len(files)}, combined size: {convert_size(totalsize)} ")
return return
......
...@@ -27,6 +27,8 @@ import json ...@@ -27,6 +27,8 @@ import json
import linkahead as db import linkahead as db
import caosadvancedtools.json_schema_exporter as jsex import caosadvancedtools.json_schema_exporter as jsex
from collections import OrderedDict
from jsonschema import FormatChecker, validate, ValidationError from jsonschema import FormatChecker, validate, ValidationError
from pytest import raises from pytest import raises
from unittest.mock import Mock, patch from unittest.mock import Mock, patch
...@@ -85,6 +87,8 @@ def _mock_execute_query(query_string, unique=False, **kwargs): ...@@ -85,6 +87,8 @@ def _mock_execute_query(query_string, unique=False, **kwargs):
return all_files return all_files
else: else:
print(f"Query string: {query_string}") print(f"Query string: {query_string}")
if unique is True:
return db.Entity()
return db.Container() return db.Container()
...@@ -108,7 +112,8 @@ def test_empty_rt(): ...@@ -108,7 +112,8 @@ def test_empty_rt():
assert len(schema["required"]) == 0 assert len(schema["required"]) == 0
assert schema["additionalProperties"] is False assert schema["additionalProperties"] is False
schema = rtjs(rt, name_and_description_in_properties=True) schema = rtjs(rt, name_property_for_new_records=True,
description_property_for_new_records=True)
assert len(schema["properties"]) == 2 assert len(schema["properties"]) == 2
assert "name" in schema["properties"] assert "name" in schema["properties"]
...@@ -435,8 +440,8 @@ def test_rt_with_references(): ...@@ -435,8 +440,8 @@ def test_rt_with_references():
rt = db.RecordType() rt = db.RecordType()
rt.add_property(name="RefProp", datatype=db.LIST("OtherType")) rt.add_property(name="RefProp", datatype=db.LIST("OtherType"))
schema = rtjs(rt, additional_properties=False, schema = rtjs(rt, additional_properties=False, name_property_for_new_records=True,
name_and_description_in_properties=True) description_property_for_new_records=True)
assert schema["additionalProperties"] is False assert schema["additionalProperties"] is False
assert "name" in schema["properties"] assert "name" in schema["properties"]
assert schema["properties"]["name"]["type"] == "string" assert schema["properties"]["name"]["type"] == "string"
...@@ -714,17 +719,23 @@ RT2: ...@@ -714,17 +719,23 @@ RT2:
# Merge the schemata # Merge the schemata
merged_list = jsex.merge_schemas([schema_RT1, schema_RT2]) merged_list = jsex.merge_schemas([schema_RT1, schema_RT2])
with raises(ValidationError):
validate({}, merged_list)
assert merged_list["type"] == "object" assert merged_list["type"] == "object"
assert merged_list["properties"]["RT1"]["title"] == "RT1" assert merged_list["properties"]["RT1"]["title"] == "RT1"
assert merged_list["properties"]["RT2"]["properties"]["some_text"]["type"] == "string" assert merged_list["properties"]["RT2"]["properties"]["some_text"]["type"] == "string"
merged_dict = jsex.merge_schemas({"schema1": schema_RT1, "schema2": schema_RT2}) merged_dict = jsex.merge_schemas({"schema1": schema_RT1, "schema2": schema_RT2})
with raises(ValidationError):
validate({}, merged_dict)
assert merged_dict["type"] == "object" assert merged_dict["type"] == "object"
assert merged_dict["properties"]["schema1"]["title"] == "RT1" assert merged_dict["properties"]["schema1"]["title"] == "RT1"
assert merged_dict["properties"]["schema2"]["properties"]["some_text"]["type"] == "string" assert merged_dict["properties"]["schema2"]["properties"]["some_text"]["type"] == "string"
# Make an array # Make an array
array = jsex.make_array(schema_RT1) array = jsex.make_array(schema_RT1)
with raises(ValidationError):
validate({}, array)
assert array["type"] == "array" assert array["type"] == "array"
assert array["items"] == schema_RT1 assert array["items"] == schema_RT1
...@@ -773,3 +784,42 @@ RT5: ...@@ -773,3 +784,42 @@ RT5:
assert rt4_deep.get_parents()[0].name == "RT3" assert rt4_deep.get_parents()[0].name == "RT3"
rt5_deep = model.get_deep("RT5") rt5_deep = model.get_deep("RT5")
assert rt5_deep.get_parents()[0].name == "RT5" assert rt5_deep.get_parents()[0].name == "RT5"
@patch("linkahead.execute_query", new=Mock(side_effect=_mock_execute_query))
def test_empty_retrieve():
"""Special case: ``do_not_retrieve`` is set, or the retrieve result is empty."""
model_str = """
RT1:
description: Some text.
RT2:
obligatory_properties:
RT1:
# some_text:
# datatype: TEXT
NoRecords:
description: A RecordType without Records.
recommended_properties:
some_text:
datatype: TEXT
RT3:
obligatory_properties:
NoRecords:
"""
model = parse_model_from_string(model_str)
schema_default = rtjs(model.get_deep("RT2"))
assert "oneOf" in schema_default["properties"]["RT1"]
assert any([el.get("title") == "Existing entries" for el in
schema_default["properties"]["RT1"]["oneOf"]])
schema_noexist = rtjs(model.get_deep("RT3"))
assert schema_noexist["properties"]["NoRecords"].get("type") == "object"
schema_noexist_noremote = rtjs(model.get_deep("RT3"), no_remote=True)
assert schema_noexist_noremote["properties"]["NoRecords"].get("type") == "object"
assert (schema_noexist_noremote["properties"]["NoRecords"].get("properties")
== OrderedDict([('some_text', {'type': 'string'})]))
schema_noexist_noretrieve = rtjs(model.get_deep("RT2"), do_not_retrieve=["RT1"])
assert schema_noexist_noretrieve["properties"]["RT1"].get("type") == "object"
assert "some_date" in schema_noexist_noretrieve["properties"]["RT1"].get("properties")
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment