diff --git a/src/caosadvancedtools/json_schema_exporter.py b/src/caosadvancedtools/json_schema_exporter.py
index 172cc1c089c56d092a61feb705e15c6cc1aa2e83..f950b06f5d48f27536cbe4760113388df1dda2a1 100644
--- a/src/caosadvancedtools/json_schema_exporter.py
+++ b/src/caosadvancedtools/json_schema_exporter.py
@@ -36,10 +36,13 @@ class JsonSchemaExporter:
     """

     def __init__(self, additional_properties: bool = True,
-                 name_and_description_in_properties: bool = False,
+                 name_property_for_new_records: bool = False,
+                 description_property_for_new_records: bool = False,
                  additional_options_for_text_props: dict = None,
                  units_in_description: bool = True,
                  do_not_create: List[str] = None,
+                 do_not_retrieve: List[str] = None,
+                 no_remote: bool = False,
                  ):
         """Set up a JsonSchemaExporter, which can then be applied on RecordTypes.

@@ -48,9 +51,12 @@ class JsonSchemaExporter:
         additional_properties : bool, optional
             Whether additional properties will be admitted in the resulting schema.
             Optional, default is True.
-        name_and_description_in_properties : bool, optional
-            Whether objects that are generated from reference properties shall have a `name` and
-            `description` property in the generated schema. Optional, default is False.
+        name_property_for_new_records : bool, optional
+            Whether objects shall generally have a `name` property in the generated schema.
+            Optional, default is False.
+        description_property_for_new_records : bool, optional
+            Whether objects shall generally have a `description` property in the generated schema.
+            Optional, default is False.
         additional_options_for_text_props : dict, optional
             Dictionary containing additional "pattern" or "format" options for string-typed
             properties. Optional, default is empty.
@@ -63,17 +69,28 @@ class JsonSchemaExporter:
            A list of RedcordType names, for which there should be no option to create them.
            Instead, only the choice of existing elements should be given.
+        do_not_retrieve : list[str]
+            A list of RecordType names, for which no Records shall be retrieved. Instead, only an
+            object description should be given. If this list overlaps with the `do_not_create`
+            parameter, the behavior is undefined.
+        no_remote : bool
+            If True, do not attempt to connect to a LinkAhead server at all. Default is False.
""" if not additional_options_for_text_props: additional_options_for_text_props = {} if not do_not_create: do_not_create = [] + if not do_not_retrieve: + do_not_retrieve = [] self._additional_properties = additional_properties - self._name_and_description_in_properties = name_and_description_in_properties + self._name_property_for_new_records = name_property_for_new_records + self._description_property_for_new_records = description_property_for_new_records self._additional_options_for_text_props = additional_options_for_text_props self._units_in_description = units_in_description self._do_not_create = do_not_create + self._do_not_retrieve = do_not_retrieve + self._no_remote = no_remote @staticmethod def _make_required_list(rt: db.RecordType): @@ -151,22 +168,31 @@ class JsonSchemaExporter: prop_name = prop.datatype if isinstance(prop.datatype, db.Entity): prop_name = prop.datatype.name - values = self._retrieve_enum_values(f"RECORD '{prop_name}'") + if prop_name in self._do_not_retrieve: + values = [] + else: + values = self._retrieve_enum_values(f"RECORD '{prop_name}'") if prop_name in self._do_not_create: # Only a simple list of values json_prop["enum"] = values else: - rt = db.execute_query(f"FIND RECORDTYPE WITH name='{prop_name}'", - unique=True) + if self._no_remote: + rt = prop.datatype + else: + rt = db.execute_query(f"FIND RECORDTYPE WITH name='{prop_name}'", + unique=True) subschema = self._make_segment_from_recordtype(rt) - subschema["title"] = "Create new" - json_prop["oneOf"] = [ - { - "title": "Existing entries", - "enum": values, - }, - subschema - ] + if values: + subschema["title"] = "Create new" + json_prop["oneOf"] = [ + { + "title": "Existing entries", + "enum": values, + }, + subschema + ] + else: + json_prop = subschema else: raise ValueError( @@ -203,8 +229,10 @@ class JsonSchemaExporter: return prop - @staticmethod - def _retrieve_enum_values(role: str): + def _retrieve_enum_values(self, role: str): + + if self._no_remote: + return [] possible_values = db.execute_query(f"SELECT name, id FROM {role}") @@ -228,8 +256,9 @@ class JsonSchemaExporter: schema["additionalProperties"] = self._additional_properties props = OrderedDict() - if self._name_and_description_in_properties: + if self._name_property_for_new_records: props["name"] = self._make_text_property("The name of the Record to be created") + if self._description_property_for_new_records: props["description"] = self._make_text_property( "The description of the Record to be created") @@ -274,10 +303,14 @@ class JsonSchemaExporter: def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = True, - name_and_description_in_properties: bool = False, + name_property_for_new_records: bool = False, + description_property_for_new_records: bool = False, additional_options_for_text_props: Optional[dict] = None, units_in_description: bool = True, - do_not_create: List[str] = None): + do_not_create: List[str] = None, + do_not_retrieve: List[str] = None, + no_remote: bool = False, + ): """Create a jsonschema from a given RecordType that can be used, e.g., to validate a json specifying a record of the given type. @@ -291,9 +324,12 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T additional_properties : bool, optional Whether additional properties will be admitted in the resulting schema. Optional, default is True. 
-    name_and_description_in_properties : bool, optional
-        Whether objects that are generated from reference properties shall have a `name` and
-        `description` property in the generated schema. Optional, default is False.
+    name_property_for_new_records : bool, optional
+        Whether objects shall generally have a `name` property in the generated schema. Optional,
+        default is False.
+    description_property_for_new_records : bool, optional
+        Whether objects shall generally have a `description` property in the generated schema.
+        Optional, default is False.
     additional_options_for_text_props : dict, optional
         Dictionary containing additional "pattern" or "format" options for string-typed
         properties. Optional, default is empty.
@@ -306,6 +342,12 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T
        A list of RedcordType names, for which there should be no option to create them.
        Instead, only the choice of existing elements should be given.
+    do_not_retrieve : list[str]
+        A list of RecordType names, for which no Records shall be retrieved. Instead, only an
+        object description should be given. If this list overlaps with the `do_not_create`
+        parameter, the behavior is undefined.
+    no_remote : bool
+        If True, do not attempt to connect to a LinkAhead server at all. Default is False.

     Returns
     -------

@@ -316,10 +358,13 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T

     exporter = JsonSchemaExporter(
         additional_properties=additional_properties,
-        name_and_description_in_properties=name_and_description_in_properties,
+        name_property_for_new_records=name_property_for_new_records,
+        description_property_for_new_records=description_property_for_new_records,
         additional_options_for_text_props=additional_options_for_text_props,
         units_in_description=units_in_description,
         do_not_create=do_not_create,
+        do_not_retrieve=do_not_retrieve,
+        no_remote=no_remote,
     )

     return exporter.recordtype_to_json_schema(rt)
@@ -408,7 +453,7 @@ out : dict
         "properties": sub_schemas,
         "required": required,
         "additionalProperties": False,
-        "$schema": "https://json-schema.org/draft/2019-09/schema",
+        "$schema": "https://json-schema.org/draft/2020-12/schema",
     }

     return result
diff --git a/src/caosadvancedtools/loadFiles.py b/src/caosadvancedtools/loadFiles.py
index 27d867c41a11ee4a6b08e7ffc9df677a3697eced..405b3d135c8af89e32c74015bd04f76f21828e20 100755
--- a/src/caosadvancedtools/loadFiles.py
+++ b/src/caosadvancedtools/loadFiles.py
@@ -175,8 +175,8 @@ def loadpath(path, include, exclude, prefix, dryrun, forceAllowSymlinks, caosdbi
     for f in files:
         totalsize += f.size

-    logger.info("Made in total {} new files with a combined size of {} "
-                "accessible.".format(len(files), convert_size(totalsize)))
+    logger.info(
+        f"Made new files accessible: {len(files)}, combined size: {convert_size(totalsize)} ")

     return
diff --git a/unittests/test_json_schema_exporter.py b/unittests/test_json_schema_exporter.py
index 18a375363c175dd6d2fd1736023ef29afa110bfd..95601ed9c07d6331d5340d63742d3e8dc5f74570 100644
--- a/unittests/test_json_schema_exporter.py
+++ b/unittests/test_json_schema_exporter.py
@@ -27,6 +27,8 @@ import json
 import linkahead as db
 import caosadvancedtools.json_schema_exporter as jsex
+from collections import OrderedDict
+
 from jsonschema import FormatChecker, validate, ValidationError
 from pytest import raises
 from unittest.mock import Mock, patch
@@ -85,6 +87,8 @@ def _mock_execute_query(query_string, unique=False, **kwargs):
         return all_files
     else:
         print(f"Query string: {query_string}")
{query_string}") + if unique is True: + return db.Entity() return db.Container() @@ -108,7 +112,8 @@ def test_empty_rt(): assert len(schema["required"]) == 0 assert schema["additionalProperties"] is False - schema = rtjs(rt, name_and_description_in_properties=True) + schema = rtjs(rt, name_property_for_new_records=True, + description_property_for_new_records=True) assert len(schema["properties"]) == 2 assert "name" in schema["properties"] @@ -435,8 +440,8 @@ def test_rt_with_references(): rt = db.RecordType() rt.add_property(name="RefProp", datatype=db.LIST("OtherType")) - schema = rtjs(rt, additional_properties=False, - name_and_description_in_properties=True) + schema = rtjs(rt, additional_properties=False, name_property_for_new_records=True, + description_property_for_new_records=True) assert schema["additionalProperties"] is False assert "name" in schema["properties"] assert schema["properties"]["name"]["type"] == "string" @@ -714,17 +719,23 @@ RT2: # Merge the schemata merged_list = jsex.merge_schemas([schema_RT1, schema_RT2]) + with raises(ValidationError): + validate({}, merged_list) assert merged_list["type"] == "object" assert merged_list["properties"]["RT1"]["title"] == "RT1" assert merged_list["properties"]["RT2"]["properties"]["some_text"]["type"] == "string" merged_dict = jsex.merge_schemas({"schema1": schema_RT1, "schema2": schema_RT2}) + with raises(ValidationError): + validate({}, merged_dict) assert merged_dict["type"] == "object" assert merged_dict["properties"]["schema1"]["title"] == "RT1" assert merged_dict["properties"]["schema2"]["properties"]["some_text"]["type"] == "string" # Make an array array = jsex.make_array(schema_RT1) + with raises(ValidationError): + validate({}, array) assert array["type"] == "array" assert array["items"] == schema_RT1 @@ -773,3 +784,42 @@ RT5: assert rt4_deep.get_parents()[0].name == "RT3" rt5_deep = model.get_deep("RT5") assert rt5_deep.get_parents()[0].name == "RT5" + + +@patch("linkahead.execute_query", new=Mock(side_effect=_mock_execute_query)) +def test_empty_retrieve(): + """Special case: ``do_not_retrieve`` is set, or the retrieve result is empty.""" + model_str = """ +RT1: + description: Some text. +RT2: + obligatory_properties: + RT1: +# some_text: +# datatype: TEXT +NoRecords: + description: A RecordType without Records. + recommended_properties: + some_text: + datatype: TEXT +RT3: + obligatory_properties: + NoRecords: + """ + model = parse_model_from_string(model_str) + schema_default = rtjs(model.get_deep("RT2")) + assert "oneOf" in schema_default["properties"]["RT1"] + assert any([el.get("title") == "Existing entries" for el in + schema_default["properties"]["RT1"]["oneOf"]]) + + schema_noexist = rtjs(model.get_deep("RT3")) + assert schema_noexist["properties"]["NoRecords"].get("type") == "object" + + schema_noexist_noremote = rtjs(model.get_deep("RT3"), no_remote=True) + assert schema_noexist_noremote["properties"]["NoRecords"].get("type") == "object" + assert (schema_noexist_noremote["properties"]["NoRecords"].get("properties") + == OrderedDict([('some_text', {'type': 'string'})])) + + schema_noexist_noretrieve = rtjs(model.get_deep("RT2"), do_not_retrieve=["RT1"]) + assert schema_noexist_noretrieve["properties"]["RT1"].get("type") == "object" + assert "some_date" in schema_noexist_noretrieve["properties"]["RT1"].get("properties")