From a10d0b64dceddbf18fd92c492db758b895d68173 Mon Sep 17 00:00:00 2001 From: "i.nueske" <i.nueske@indiscale.com> Date: Mon, 3 Mar 2025 11:24:37 +0100 Subject: [PATCH 01/36] TST: Use _validate_jsonschema instead of jsonschema.validate in fill_xlsx, move _validate_jsonschema to own file and and adjust tests to match --- .../table_json_conversion/convert.py | 47 +-------- .../table_json_conversion/fill_xlsx.py | 10 +- .../table_json_conversion/validation_utils.py | 98 +++++++++++++++++++ .../table_json_conversion/test_fill_xlsx.py | 9 +- 4 files changed, 113 insertions(+), 51 deletions(-) create mode 100644 src/caosadvancedtools/table_json_conversion/validation_utils.py diff --git a/src/caosadvancedtools/table_json_conversion/convert.py b/src/caosadvancedtools/table_json_conversion/convert.py index 7a3d63a2..33432b85 100644 --- a/src/caosadvancedtools/table_json_conversion/convert.py +++ b/src/caosadvancedtools/table_json_conversion/convert.py @@ -31,12 +31,12 @@ from operator import getitem from types import SimpleNamespace from typing import Any, BinaryIO, Callable, TextIO, Union, Optional from warnings import warn -from copy import deepcopy import jsonschema from openpyxl import load_workbook from openpyxl.worksheet.worksheet import Worksheet +from .validation_utils import _validate_jsonschema from caosadvancedtools.table_json_conversion import xlsx_utils from caosadvancedtools.table_json_conversion.fill_xlsx import read_or_dict @@ -153,51 +153,6 @@ class ForeignError(KeyError): self.definitions = definitions -def _validate_jsonschema(instance, schema): - # Checks whether a key: value pair is in the given schema or fulfills the - # criteria of a direct subschema (anyOf, allOf, oneOf) - def in_schema(key, val, schema): - if schema.get(key, None) == val: - return True - if 'anyOf' in schema: - return any([in_schema(key, val, sub) for sub in schema['anyOf']]) - if 'allOf' in schema: - return all([in_schema(key, val, sub) for sub in schema['allOf']]) - if 'oneOf' in 
schema: - return [in_schema(key, val, sub) for sub in schema['oneOf']].count(True) == 1 - return False - - # Removes Key: None and datetime instances from nested dicts and lists of - # any depth. Key: None is currently valid as there is no 'obligatory with - # value', and datetime cannot be checked by jsonschema. - def remove_incompatible_values(it, schema): - if isinstance(it, list): - schema = schema.get('items', schema) - for elem in it: - remove_incompatible_values(elem, schema) - elif isinstance(it, dict): - schema = schema.get('properties', schema) - for key, elem in list(it.items()): - if elem is None: - it.pop(key) - elif isinstance(elem, datetime.date) or isinstance(elem, datetime.datetime): - if in_schema('format', 'date', schema[key]) or in_schema('format', 'date-time', schema[key]): - it.pop(key) - elif isinstance(it, (dict, list)): - remove_incompatible_values(elem, schema[key]) - return it - - # If instance is not a dict, remove_incompatible_values would not remove - # the value if it is valid, so we need to check manually by wrapping - instance = deepcopy(instance) - if not isinstance(instance, dict): - if remove_incompatible_values({'key': instance}, {'key': schema}) == {}: - return - # Clean dict and validate - instance = remove_incompatible_values(deepcopy(instance), schema) - jsonschema.validate(instance, schema=schema) - - class XLSXConverter: """Class for conversion from XLSX to JSON. 
diff --git a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py index f2e0abc3..fe62731f 100644 --- a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py @@ -28,11 +28,11 @@ from types import SimpleNamespace from typing import Any, Optional, TextIO, Union from warnings import warn -from jsonschema import FormatChecker, validate from jsonschema.exceptions import ValidationError from openpyxl import load_workbook, Workbook from openpyxl.cell.cell import ILLEGAL_CHARACTERS_RE +from .validation_utils import _validate_jsonschema from .xlsx_utils import ( array_schema_from_model_schema, get_foreign_key_columns, @@ -354,10 +354,12 @@ validation_schema: dict, optional # Validation if validation_schema is not None: - validation_schema = array_schema_from_model_schema(read_or_dict(validation_schema)) + # convert to array_schema is given schema is a model_schema + if 'properties' in validation_schema and validation_schema['properties'].values(): + if list(validation_schema['properties'].values())[0]["type"] != "array": + validation_schema = array_schema_from_model_schema(read_or_dict(validation_schema)) try: - # FIXME redefine checker for datetime - validate(data, validation_schema, format_checker=FormatChecker()) + _validate_jsonschema(data, validation_schema) except ValidationError as verr: print(verr.message) raise verr diff --git a/src/caosadvancedtools/table_json_conversion/validation_utils.py b/src/caosadvancedtools/table_json_conversion/validation_utils.py new file mode 100644 index 00000000..4d5e0741 --- /dev/null +++ b/src/caosadvancedtools/table_json_conversion/validation_utils.py @@ -0,0 +1,98 @@ +# encoding: utf-8 +# +# This file is a part of the LinkAhead Project. 
+# +# Copyright (C) 2025 Indiscale GmbH <info@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. + +""" +Utilities for validation of conversion / import / export results. +For internal use. +""" + +import datetime +import json +from copy import deepcopy +from typing import Union + +import jsonschema + + +def _validate_jsonschema(instance: Union[dict, int, str, bool], + schema: Union[str, dict]): + """ + A table_json_conversion compatible variant of jsonschema.validate(). + Accepts instances with datetime instances and None in not-nullable entries. + + Parameters + ---------- + instance : dict, int, str, bool + Either a dict or a json entry to check against the given schema. + schema : str, dict + Either a dict with the jsonschema to check against, or a path to a file + containing the same. + """ + # Helper Functions + def _in_schema(key, val, schema): + """ + Checks whether a key: value pair is in the given schema or fulfills the + criteria of a direct subschema (anyOf, allOf, oneOf). 
+ """ + if schema.get(key, None) == val: + return True + if 'anyOf' in schema: + return any([_in_schema(key, val, sub) for sub in schema['anyOf']]) + if 'allOf' in schema: + return all([_in_schema(key, val, sub) for sub in schema['allOf']]) + if 'oneOf' in schema: + return [_in_schema(key, val, sub) for sub in schema['oneOf']].count(True) == 1 + return False + + def _remove_incompatible_vals(iterable, schema): + """ + Removes Key: None and datetime instances from nested dicts and lists of + any depth. Key: None is currently valid as there is no 'obligatory with + value', and datetime cannot be checked by jsonschema. + """ + if isinstance(iterable, list): + schema = schema.get('items', schema) + for elem in iterable: + _remove_incompatible_vals(elem, schema) + elif isinstance(iterable, dict): + schema = schema.get('properties', schema) + for key, elem in list(iterable.items()): + if elem is None: + iterable.pop(key) + elif isinstance(elem, (datetime.date, datetime.datetime)): + if (_in_schema('format', 'date', schema[key]) or + _in_schema('format', 'date-time', schema[key])): + iterable.pop(key) + elif isinstance(iterable, (dict, list)): + _remove_incompatible_vals(elem, schema[key]) + return iterable + + # If jsonschema is a file, load its content + if str(schema).endswith(".json"): + with open(schema, encoding="utf-8") as content: + schema = json.load(content) + # If instance is not a dict, remove_incompatible_values would not remove + # the value if it is valid, so we need to check manually by wrapping + instance = deepcopy(instance) + if not isinstance(instance, dict): + if _remove_incompatible_vals({'key': instance}, {'key': schema}) == {}: + return + # Clean dict and validate + instance = _remove_incompatible_vals(deepcopy(instance), schema) + jsonschema.validate(instance, schema=schema) diff --git a/unittests/table_json_conversion/test_fill_xlsx.py b/unittests/table_json_conversion/test_fill_xlsx.py index 899bb81e..f77131bc 100644 --- 
a/unittests/table_json_conversion/test_fill_xlsx.py +++ b/unittests/table_json_conversion/test_fill_xlsx.py @@ -59,13 +59,20 @@ schema: str, optional, custom_output: str, optional If given, write to this file and drop into an IPython shell. For development only. """ + if schema is not None: + with open(schema, encoding="utf8", mode="r") as sch_f: + model_schema = json.load(sch_f) + data_schema = xlsx_utils.array_schema_from_model_schema(model_schema) + else: + data_schema = schema + with tempfile.TemporaryDirectory() as tmpdir: outfile = os.path.join(tmpdir, 'test.xlsx') assert not os.path.exists(outfile) if custom_output is not None: outfile = custom_output fill_template(data=json_file, template=template_file, result=outfile, - validation_schema=schema) + validation_schema=data_schema) assert os.path.exists(outfile) generated = load_workbook(outfile) # workbook can be read known_good_wb = load_workbook(known_good) -- GitLab From fff1c8f8200e89abe444bc9d126877c6e03c53b5 Mon Sep 17 00:00:00 2001 From: "i.nueske" <i.nueske@indiscale.com> Date: Wed, 5 Mar 2025 09:30:03 +0100 Subject: [PATCH 02/36] WIP: Add function to export records to xlsx: - Added parameter use_id_for_identification to JsonSchemaExporter, which sets foreign key to id and adds an 'id' column to all tables - Added parameter return_data_schema to merge_schemas. If set, merge_schemas returns a data_schema as well as the normal model_schema - Added new file export_import_xlsx. 
In addition to private functions for generating templates, schemas, and data, there is a new public function export_container_to_xlsx which generates an xlsx file containing the data from the given records at a given path - Changed a print warning in fill_xlsx to warnings.warn for easier filtering --- src/caosadvancedtools/json_schema_exporter.py | 53 +++- .../export_import_xlsx.py | 237 ++++++++++++++++++ .../table_json_conversion/fill_xlsx.py | 3 +- 3 files changed, 288 insertions(+), 5 deletions(-) create mode 100644 src/caosadvancedtools/table_json_conversion/export_import_xlsx.py diff --git a/src/caosadvancedtools/json_schema_exporter.py b/src/caosadvancedtools/json_schema_exporter.py index 56568ca1..bce3102e 100644 --- a/src/caosadvancedtools/json_schema_exporter.py +++ b/src/caosadvancedtools/json_schema_exporter.py @@ -70,6 +70,7 @@ class JsonSchemaExporter: def __init__(self, additional_properties: bool = True, name_property_for_new_records: bool = False, + use_id_for_identification: bool = False, description_property_for_new_records: bool = False, additional_options_for_text_props: dict = None, additional_json_schema: Dict[str, dict] = None, @@ -92,6 +93,9 @@ class JsonSchemaExporter: name_property_for_new_records : bool, optional Whether objects shall generally have a `name` property in the generated schema. Optional, default is False. + use_id_for_identification: bool, optional + If set to true, an 'id' property is added to all records, and + foreign key references are assumed to be ids. description_property_for_new_records : bool, optional Whether objects shall generally have a `description` property in the generated schema. Optional, default is False. 
@@ -151,6 +155,7 @@ class JsonSchemaExporter: self._additional_properties = additional_properties self._name_property_for_new_records = name_property_for_new_records + self._use_id_for_identification = use_id_for_identification self._description_property_for_new_records = description_property_for_new_records self._additional_options_for_text_props = additional_options_for_text_props self._additional_json_schema = additional_json_schema @@ -257,7 +262,18 @@ ui_schema : dict if inner_ui_schema: ui_schema["items"] = inner_ui_schema elif prop.is_reference(): - if prop.datatype == db.REFERENCE: + if self._use_id_for_identification: + json_prop["type"] = "object" + json_prop["required"] = [] + json_prop["additionalProperties"] = False + json_prop["title"] = prop.name + if prop.datatype == db.FILE: + json_prop["description"] = "Path to file" + json_prop["properties"] = {"path": {"type": "string"}} + else: + json_prop["properties"] = { + "id": {"oneOf": [{"type": "integer"}, {"type": "string"}]}} + elif prop.datatype == db.REFERENCE: # No Record creation since no RT is specified and we don't know what # schema to use, so only enum of all Records and all Files. values = self._retrieve_enum_values("RECORD") + self._retrieve_enum_values("FILE") @@ -410,7 +426,9 @@ ui_schema : dict vals = [] for val in possible_values: - if val.name: + if self._use_id_for_identification: + vals.append(val.id) + elif val.name: vals.append(f"{val.name}") else: vals.append(f"{val.id}") @@ -453,6 +471,8 @@ ui_schema : dict props = OrderedDict() if self._name_property_for_new_records: props["name"] = self._make_text_property("The name of the Record to be created") + if self._use_id_for_identification: + props["id"] = self._make_text_property("The id of the Record") if self._description_property_for_new_records: props["description"] = self._make_text_property( "The description of the Record to be created") @@ -544,6 +564,7 @@ guaranteed (as of now). 
def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = True, name_property_for_new_records: bool = False, + use_id_for_identification: bool = False, description_property_for_new_records: bool = False, additional_options_for_text_props: Optional[dict] = None, additional_json_schema: Dict[str, dict] = None, @@ -573,6 +594,9 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T name_property_for_new_records : bool, optional Whether objects shall generally have a `name` property in the generated schema. Optional, default is False. + use_id_for_identification: bool, optional + If set to true, an 'id' property is added to all records, and foreign + key references are assumed to be ids. description_property_for_new_records : bool, optional Whether objects shall generally have a `description` property in the generated schema. Optional, default is False. @@ -629,6 +653,7 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T exporter = JsonSchemaExporter( additional_properties=additional_properties, name_property_for_new_records=name_property_for_new_records, + use_id_for_identification=use_id_for_identification, description_property_for_new_records=description_property_for_new_records, additional_options_for_text_props=additional_options_for_text_props, additional_json_schema=additional_json_schema, @@ -696,8 +721,8 @@ ui_schema : dict, optional def merge_schemas(schemas: Union[Dict[str, dict], Iterable[dict]], - rjsf_uischemas: Union[Dict[str, dict], Sequence[dict]] = None) -> ( - Union[dict, Tuple[dict, dict]]): + rjsf_uischemas: Union[Dict[str, dict], Sequence[dict]] = None, + return_data_schema=False) -> (Union[dict, Tuple[dict, dict]]): """Merge the given schemata into a single schema. 
The result will look like this: @@ -728,6 +753,11 @@ rjsf_uischemas : dict[str, dict] | Iterable[dict], optional If given, also merge the react-jsonschema-forms from this argument and return as the second return value. If ``schemas`` is a dict, this parameter must also be a dict, if ``schemas`` is only an iterable, this paramater must support numerical indexing. +return_data_schema : bool, default False + If set to True, a second schema with all top-level entries wrapped in an + array will be returned. This is necessary if the schema describes the + data layout of an XLSX file. + Cannot be used together with rjsf_uischemas. Returns ------- @@ -737,10 +767,13 @@ schema : dict uischema : dict If ``rjsf_uischemas`` was given, this contains the merged UI schemata. +data_schema : dict + If ``return_data_schema`` was given, this contains the XLSX file schema. """ sub_schemas: dict[str, dict] = OrderedDict() required = [] ui_schema = None + data_sub_schemas = OrderedDict() if isinstance(schemas, dict): sub_schemas = schemas @@ -754,6 +787,8 @@ uischema : dict for i, schema in enumerate(schemas, start=1): title = schema.get("title", str(i)) sub_schemas[title] = schema + if return_data_schema: + data_sub_schemas[title] = {"type": "array", "items": schema} required.append(title) if rjsf_uischemas is not None: if not isinstance(rjsf_uischemas, Sequence): @@ -771,7 +806,17 @@ uischema : dict "additionalProperties": False, "$schema": "https://json-schema.org/draft/2020-12/schema", } + if return_data_schema: + data_schema = { + "type": "object", + "properties": data_sub_schemas, + "required": required, + "additionalProperties": False, + "$schema": "https://json-schema.org/draft/2020-12/schema", + } if ui_schema is not None: return result, ui_schema + if return_data_schema: + return result, data_schema return result diff --git a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py new file mode 
100644 index 00000000..ed2b9720 --- /dev/null +++ b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py @@ -0,0 +1,237 @@ +# encoding: utf-8 +# +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2025 Indiscale GmbH <info@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. + +""" +Utilities for automatically exporting and importing data to and from xlsx. +""" + +import json +import tempfile +import warnings +from typing import Union +from pathlib import Path + +import linkahead +from linkahead.common.models import Container +from linkahead import execute_query +with warnings.catch_warnings(): + warnings.filterwarnings("ignore", message="^.*experimental.*$") + from linkahead.high_level_api import convert_to_python_object + +from ..json_schema_exporter import JsonSchemaExporter, merge_schemas +from .table_generator import XLSXTemplateGenerator +from .fill_xlsx import fill_template + + +def _generate_jsonschema_from_recordtypes(recordtypes: list, + out_path: Union[str, Path] = None) -> dict: + """ + Generate a combined jsonschema for all given recordtypes. + + Parameters + ---------- + recordtypes : Iterable + List of RecordType entities for which a schema should be generated. + out_path : str, Path + If given, the resulting jsonschema will also be written to the file + given by out_path. 
+ Optional, default None + + Returns + ------- + data_schema : dict + The generated schema. + """ + # Generate schema + schema_generator = JsonSchemaExporter(additional_properties=False, + name_property_for_new_records=True, + use_id_for_identification=True) + schemas = [schema_generator.recordtype_to_json_schema(recordtype) + for recordtype in recordtypes] + _, data_schema = merge_schemas(schemas, return_data_schema=True) + # If indicated, save as json file + if out_path is not None: + with open(out_path, mode="w", encoding="utf8") as json_file: + json.dump(data_schema, json_file, ensure_ascii=False, indent=2) + # Return + return data_schema + + +def _generate_jsondata_from_records(records: Container, + out_path: Union[str, Path] = None) -> dict: + """ + Extract relevant information (id, name, properties, etc.) from the given + records and converts this information to json. + + Parameters + ---------- + records : Iterable + List of Record entities from which the data will be converted to json. + out_path : str, Path + If given, the resulting jsondata will also be written to the file given + by out_path. + Optional, default None + + Returns + ------- + json_data : dict + The given records data in json form. 
+ """ + json_data = {} + # Ignore warning from high_level_api to avoid raising warnings that cannot + # be avoided by user + for record in records: + # Convert records to high level api objects + record_obj = convert_to_python_object(record) + try: + record_obj.resolve_references(True, None) + except linkahead.LinkAheadException: + warnings.warn(f"Data for record with id {record_obj.id} might be " + f"incomplete, unsuccessful retrieve.") + # Get json representation & adjust layout for compatibility + raw_data = record_obj.serialize() + raw_data.update(raw_data.get('properties', {})) + raw_data.pop('properties') + if record.parents[0].name not in json_data: + json_data[record.parents[0].name] = [] + json_data[record.parents[0].name].append(raw_data) + # If indicated, save as json file + if out_path is not None: + with open(out_path, mode="w", encoding="utf8") as json_file: + json.dump(json_data, json_file, ensure_ascii=False, indent=2, default=str) + # Return + return json_data + + +def _generate_xlsx_template_file(schema: dict, + recordtype_names: Union[list, set], + out_path: Union[str, Path]): + """ + Generate an empty XLSX template file for the given schema at the indicated + location. + + Parameters + ---------- + schema : dict + Jsonschema for which an xlsx template should be generated. + recordtype_names : Iterable + List of all RecordType names in the given schema. + out_path : str, Path + The resulting xlsx template will be written to the file at this path. 
+ """ + generator = XLSXTemplateGenerator() + foreign_keys = {name: {"__this__": ['id']} for name in recordtype_names} + generator.generate(schema=schema, foreign_keys=foreign_keys, + filepath=out_path) + + +def export_container_to_xlsx(records: Container, + xlsx_data_filepath: Union[str, Path], + include_referenced_entities: bool = False, + jsonschema_filepath: Union[str, Path] = None, + jsondata_filepath: Union[str, Path] = None, + xlsx_template_filepath: Union[str, Path] = None): + """ + Export the data of the given records to an xlsx file. + + Parameters + ---------- + records : Container, Iterable + List of records to export. + xlsx_data_filepath : str, Path + Write the resulting xlsx file to the file at this location. + include_referenced_entities : bool + If set to true, any records referenced by properties of those given in + 'records' will also be exported. + Optional, default False + jsonschema_filepath : str, Path + If given, write the jsonschema to this file. + Optional, default None + jsondata_filepath : str, Path + If given, write the json data to this file. + Optional, default None + xlsx_template_filepath : str, Path + If given, write the xlsx template to this file. + Optional, default None + """ + # Ensure every record is only handled once by using id as key. 
+ entity_ids = {record.id for record in records} + # If indicated, also get and add the records referenced on the first level + # in the given container + if include_referenced_entities: + for record in records: + for prop in record.properties: + if prop.is_reference() and prop.value is not None: + try: + ref_list = prop.value + if not isinstance(ref_list, list): + ref_list = [ref_list] + for element in ref_list: + if isinstance(element, (int, str)): + elem_id = element + elif isinstance(element, linkahead.Entity): + elem_id = element.id + else: + warnings.warn(f"Cannot handle referenced " + f"entity '{prop.value}'") + continue + entity_ids.add(elem_id) + except linkahead.LinkAheadException as e: + warnings.warn(f"Cannot handle referenced entity " + f"'{prop.value}' because of error '{e}'") + # Retrieve data + new_records = [] + for entity_id in entity_ids: + entity_id = str(entity_id).split('@')[0] + entity = execute_query(f"FIND ENTITY WITH (ID = {entity_id})", unique=True) + if len(entity.get_parents()) > 0: + new_records.append(entity) + # ToDo: Handle Files and other Entities (e.g. 
Properties) separately + records = new_records + recordtypes = {record.parents[0] for record in records} + recordtype_ids = {recordtype.id for recordtype in recordtypes} + recordtypes = [execute_query(f"FIND RECORDTYPE WITH (ID = {rt_id})", + unique=True) + for rt_id in recordtype_ids] + recordtype_names = {recordtype.name for recordtype in recordtypes} + # Generate schema and data from the records + json_schema = _generate_jsonschema_from_recordtypes(recordtypes, + jsonschema_filepath) + json_data = _generate_jsondata_from_records(records, jsondata_filepath) + # Generate xlsx template with tempfile if necessary + if xlsx_template_filepath is None: + xlsx_template_file = tempfile.NamedTemporaryFile(suffix='.xlsx') + xlsx_template_filepath = xlsx_template_file.name + else: + xlsx_template_file = None + _generate_xlsx_template_file(json_schema, recordtype_names, + xlsx_template_filepath) + # Fill xlsx file with data + with warnings.catch_warnings(): + # We have a lot of information in the json data that we do not need + warnings.filterwarnings("ignore", + message="^.*Ignoring path with missing sheet index.*$") + warnings.filterwarnings("ignore", + message="^.*No validation schema.*$") + fill_template(data=json_data, template=xlsx_template_filepath, + result=xlsx_data_filepath) + # ToDo: Validation + # Cleanup + if xlsx_template_file is not None: + xlsx_template_file.close() diff --git a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py index fe62731f..92fae16c 100644 --- a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py @@ -24,6 +24,7 @@ from __future__ import annotations import datetime import pathlib +import warnings from types import SimpleNamespace from typing import Any, Optional, TextIO, Union from warnings import warn @@ -364,7 +365,7 @@ validation_schema: dict, optional print(verr.message) raise verr else: - print("No 
validation schema given, continue at your own risk.") + warnings.warn("No validation schema given, continue at your own risk.") # Filling the data result_wb = load_workbook(template) -- GitLab From 76ed7bfeeefb72a2988dd7ad634b46293fc9cd62 Mon Sep 17 00:00:00 2001 From: "i.nueske" <i.nueske@indiscale.com> Date: Wed, 5 Mar 2025 13:05:56 +0100 Subject: [PATCH 03/36] ENH: XLSX Export cleanup incl. remove broken warnings filter --- .../table_json_conversion/export_import_xlsx.py | 7 ++----- .../table_json_conversion/validation_utils.py | 5 ++++- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py index ed2b9720..8730d986 100644 --- a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py @@ -30,9 +30,7 @@ from pathlib import Path import linkahead from linkahead.common.models import Container from linkahead import execute_query -with warnings.catch_warnings(): - warnings.filterwarnings("ignore", message="^.*experimental.*$") - from linkahead.high_level_api import convert_to_python_object +from linkahead.high_level_api import convert_to_python_object from ..json_schema_exporter import JsonSchemaExporter, merge_schemas from .table_generator import XLSXTemplateGenerator @@ -100,7 +98,7 @@ def _generate_jsondata_from_records(records: Container, # Convert records to high level api objects record_obj = convert_to_python_object(record) try: - record_obj.resolve_references(True, None) + record_obj.resolve_references(False, None) except linkahead.LinkAheadException: warnings.warn(f"Data for record with id {record_obj.id} might be " f"incomplete, unsuccessful retrieve.") @@ -231,7 +229,6 @@ def export_container_to_xlsx(records: Container, message="^.*No validation schema.*$") fill_template(data=json_data, template=xlsx_template_filepath, 
result=xlsx_data_filepath) - # ToDo: Validation # Cleanup if xlsx_template_file is not None: xlsx_template_file.close() diff --git a/src/caosadvancedtools/table_json_conversion/validation_utils.py b/src/caosadvancedtools/table_json_conversion/validation_utils.py index 4d5e0741..f1e77f48 100644 --- a/src/caosadvancedtools/table_json_conversion/validation_utils.py +++ b/src/caosadvancedtools/table_json_conversion/validation_utils.py @@ -80,7 +80,10 @@ def _validate_jsonschema(instance: Union[dict, int, str, bool], _in_schema('format', 'date-time', schema[key])): iterable.pop(key) elif isinstance(iterable, (dict, list)): - _remove_incompatible_vals(elem, schema[key]) + try: + _remove_incompatible_vals(elem, schema[key]) + except KeyError: + pass return iterable # If jsonschema is a file, load its content -- GitLab From 93fdae4167cc7d2b33f45fb15b1c723dd570d396 Mon Sep 17 00:00:00 2001 From: "i.nueske" <i.nueske@indiscale.com> Date: Sun, 9 Mar 2025 13:46:10 +0100 Subject: [PATCH 04/36] TST: Add more XLSX tests, unignore validation parameter in convert_and_compare, fix typo --- .../table_json_conversion/fill_xlsx.py | 2 +- .../table_json_conversion/test_fill_xlsx.py | 26 +++++++++++++++ .../table_json_conversion/test_read_xlsx.py | 32 +++++++++++++++++-- 3 files changed, 57 insertions(+), 3 deletions(-) diff --git a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py index 92fae16c..1f39f66d 100644 --- a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py @@ -355,7 +355,7 @@ validation_schema: dict, optional # Validation if validation_schema is not None: - # convert to array_schema is given schema is a model_schema + # convert to array_schema if given schema is a model_schema if 'properties' in validation_schema and validation_schema['properties'].values(): if list(validation_schema['properties'].values())[0]["type"] != "array": 
validation_schema = array_schema_from_model_schema(read_or_dict(validation_schema)) diff --git a/unittests/table_json_conversion/test_fill_xlsx.py b/unittests/table_json_conversion/test_fill_xlsx.py index f77131bc..084f19ba 100644 --- a/unittests/table_json_conversion/test_fill_xlsx.py +++ b/unittests/table_json_conversion/test_fill_xlsx.py @@ -196,6 +196,32 @@ def test_errors(): known_good=rfp("data/simple_data.xlsx"), schema=rfp("data/simple_schema.json")) assert exc.value.message == "0.5 is not of type 'integer'" + # Check wrong data + with open(rfp("data/simple_data.json")) as json_file: + json_data = json.load(json_file) + json_data["Training"][0]["date"] = "2023-01" + with tempfile.NamedTemporaryFile(suffix='.json', mode='w+t') as temp_file: + json.dump(json_data, temp_file) + temp_file.seek(0) + with pytest.raises(AssertionError) as exc: + fill_and_compare(json_file=temp_file.name, + template_file=rfp("data/simple_template.xlsx"), + known_good=rfp("data/simple_data.xlsx"), + schema=rfp("data/simple_schema.json")) + assert "Training" in str(exc) and "2023-01" in str(exc) + # Check wrong schema + with open(rfp("data/simple_schema.json")) as json_file: + json_schema = json.load(json_file) + json_schema["properties"]["Person"]["properties"]["given_name"]["type"] = "integer" + with tempfile.NamedTemporaryFile(suffix='.json', mode='w+t') as temp_file: + json.dump(json_schema, temp_file) + temp_file.seek(0) + with pytest.raises(schema_exc.ValidationError) as exc: + fill_and_compare(json_file=rfp("data/simple_data.json"), + template_file=rfp("data/simple_template.xlsx"), + known_good=rfp("data/simple_data.xlsx"), + schema=temp_file.name) + assert "integer" in str(exc) def test_data_schema_generation(): diff --git a/unittests/table_json_conversion/test_read_xlsx.py b/unittests/table_json_conversion/test_read_xlsx.py index d453ab35..10b462df 100644 --- a/unittests/table_json_conversion/test_read_xlsx.py +++ b/unittests/table_json_conversion/test_read_xlsx.py @@ -24,6 
+24,7 @@ import datetime import json import os import re +import tempfile from types import SimpleNamespace from typing import Optional @@ -43,7 +44,7 @@ def rfp(*pathcomponents): def convert_and_compare(xlsx_file: str, schema_file: str, known_good_file: str, known_good_data: Optional[dict] = None, strict: bool = False, - validate: bool = False) -> dict: + validate: bool = True) -> dict: """Convert an XLSX file and compare to a known result. Exactly one of ``known_good_file`` and ``known_good_data`` should be non-empty. @@ -57,7 +58,7 @@ json: dict model_schema = json.load(sch_f) data_schema = xlsx_utils.array_schema_from_model_schema(model_schema) - result = convert.to_dict(xlsx=xlsx_file, schema=data_schema, validate=True) + result = convert.to_dict(xlsx=xlsx_file, schema=data_schema, validate=validate) if known_good_file: with open(known_good_file, encoding="utf-8") as myfile: expected = json.load(myfile) @@ -101,6 +102,33 @@ def test_conversions(): assert str(err.value).startswith("Values at path ['Training', 0, ") +def test_validation(): + # Check wrong data + with open(rfp("data/simple_data.json")) as json_file: + known_good = json.load(json_file) + known_good["Training"][0]["date"] = "2023-01-02" + with tempfile.NamedTemporaryFile(suffix='.json', mode='w+t') as temp_file: + json.dump(known_good, temp_file) + temp_file.seek(0) + with pytest.raises(AssertionError) as exc: + convert_and_compare(xlsx_file=rfp("data/simple_data.xlsx"), + schema_file=rfp("data/simple_schema.json"), + known_good_file=temp_file.name) + assert "Training" in str(exc) and "2023-01-02" in str(exc) + # Check wrong schema + with open(rfp("data/simple_schema.json")) as json_file: + json_schema = json.load(json_file) + json_schema["properties"]["Person"]["properties"]["given_name"]["type"] = "integer" + with tempfile.NamedTemporaryFile(suffix='.json', mode='w+t') as temp_file: + json.dump(json_schema, temp_file) + temp_file.seek(0) + with pytest.raises(jsonschema.ValidationError) as exc: + 
convert_and_compare(xlsx_file=rfp("data/simple_data.xlsx"), + schema_file=temp_file.name, + known_good_file=rfp("data/simple_data.json")) + assert "integer" in str(exc) + + def test_missing_columns(): with pytest.raises(ValueError) as caught: convert.to_dict(xlsx=rfp("data/simple_data_missing.xlsx"), -- GitLab From 376cd77ccc7971b4bbb4297364c8286e6c7ae2d3 Mon Sep 17 00:00:00 2001 From: "i.nueske" <i.nueske@indiscale.com> Date: Sun, 9 Mar 2025 14:11:23 +0100 Subject: [PATCH 05/36] DOC: Update Changelog --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 404424de..dc465d78 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added ### +- Added table_json_conversion.export_import_xlsx with a public function + export_container_to_xlsx, which exports the data of a given Entity + Container to a XLSX file. + ### Changed ### ### Deprecated ### -- GitLab From 3b93d81e89ed955c59e91ad4903c70af0bd4fd44 Mon Sep 17 00:00:00 2001 From: "i.nueske" <i.nueske@indiscale.com> Date: Sun, 9 Mar 2025 14:21:57 +0100 Subject: [PATCH 06/36] DOC: Add some comments --- .../table_json_conversion/export_import_xlsx.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py index 8730d986..0fe5d9d8 100644 --- a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py @@ -92,8 +92,6 @@ def _generate_jsondata_from_records(records: Container, The given records data in json form. 
""" json_data = {} - # Ignore warning from high_level_api to avoid raising warnings that cannot - # be avoided by user for record in records: # Convert records to high level api objects record_obj = convert_to_python_object(record) @@ -196,8 +194,10 @@ def export_container_to_xlsx(records: Container, # Retrieve data new_records = [] for entity_id in entity_ids: - entity_id = str(entity_id).split('@')[0] + entity_id = str(entity_id).split('@')[0] # Queries cannot handle version entity = execute_query(f"FIND ENTITY WITH (ID = {entity_id})", unique=True) + # We can currently only handle Entities with a parent, as otherwise we + # do not know which sheet they belong in. if len(entity.get_parents()) > 0: new_records.append(entity) # ToDo: Handle Files and other Entities (e.g. Properties) separately @@ -212,7 +212,9 @@ def export_container_to_xlsx(records: Container, json_schema = _generate_jsonschema_from_recordtypes(recordtypes, jsonschema_filepath) json_data = _generate_jsondata_from_records(records, jsondata_filepath) - # Generate xlsx template with tempfile if necessary + # Generate xlsx template + # _generate_xlsx_template_file needs a file name, so use NamedTemporaryFile + # ToDo: This might not work on windows, if not, fix _generate file handling if xlsx_template_filepath is None: xlsx_template_file = tempfile.NamedTemporaryFile(suffix='.xlsx') xlsx_template_filepath = xlsx_template_file.name -- GitLab From b1d0ec967959136d753f0932cfa5f2a0e6499397 Mon Sep 17 00:00:00 2001 From: "i.nueske" <i.nueske@indiscale.com> Date: Wed, 12 Mar 2025 19:44:16 +0100 Subject: [PATCH 07/36] MNT: Suppress high_level_api import warning --- .../table_json_conversion/export_import_xlsx.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py index 0fe5d9d8..d41fe333 100644 --- 
a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py @@ -24,18 +24,24 @@ Utilities for automatically exporting and importing data to and from xlsx. import json import tempfile import warnings +import logging from typing import Union from pathlib import Path import linkahead from linkahead.common.models import Container from linkahead import execute_query -from linkahead.high_level_api import convert_to_python_object from ..json_schema_exporter import JsonSchemaExporter, merge_schemas from .table_generator import XLSXTemplateGenerator from .fill_xlsx import fill_template +# The high_level_api import would normally warn about the API being +# experimental. We know this, so suppress the warning. +logging.disable(logging.WARNING) +from linkahead.high_level_api import convert_to_python_object +logging.disable(logging.NOTSET) + def _generate_jsonschema_from_recordtypes(recordtypes: list, out_path: Union[str, Path] = None) -> dict: -- GitLab From aec14b4a192a66934e5ca817d8a756b45afc31a5 Mon Sep 17 00:00:00 2001 From: "i.nueske" <i.nueske@indiscale.com> Date: Wed, 12 Mar 2025 20:00:18 +0100 Subject: [PATCH 08/36] STY: Ignore style issue --- .../table_json_conversion/export_import_xlsx.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py index d41fe333..ea18a374 100644 --- a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py @@ -39,7 +39,7 @@ from .fill_xlsx import fill_template # The high_level_api import would normally warn about the API being # experimental. We know this, so suppress the warning. 
logging.disable(logging.WARNING) -from linkahead.high_level_api import convert_to_python_object +from linkahead.high_level_api import convert_to_python_object # noqa: E402, pylint: disable=wrong-import-position logging.disable(logging.NOTSET) -- GitLab From f48ae99c01c5cdec0997dde4842e0db52717dc56 Mon Sep 17 00:00:00 2001 From: "i.nueske" <i.nueske@indiscale.com> Date: Tue, 8 Apr 2025 12:54:59 +0200 Subject: [PATCH 09/36] TEST: Add minimal integration test --- integrationtests/test_ex_import_xlsx.py | 86 +++++++++++++++++++++++++ 1 file changed, 86 insertions(+) create mode 100755 integrationtests/test_ex_import_xlsx.py diff --git a/integrationtests/test_ex_import_xlsx.py b/integrationtests/test_ex_import_xlsx.py new file mode 100755 index 00000000..89593e06 --- /dev/null +++ b/integrationtests/test_ex_import_xlsx.py @@ -0,0 +1,86 @@ +# encoding: utf-8 +# +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2025 Indiscale GmbH <info@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. 
+ +from pathlib import Path + +import linkahead as db +from caosadvancedtools.table_json_conversion import export_import_xlsx + + +def setup_function(function): + "Create needed test data" + try: + # Setup data structure + test_rt_0 = db.RecordType(name="Person", description="An observant human.") + test_prop_0 = db.Property(name="fullname", datatype=db.TEXT) + test_rt_0.add_property(test_prop_0) + test_rt_1 = db.RecordType(name="ObservationRecord") + test_prop_1 = db.Property(name="date", datatype=db.DATETIME) + test_prop_2 = db.Property(name="amount", datatype=db.INTEGER) + test_prop_3 = db.Property(name="observer", datatype=test_rt_0) + test_rt_1.add_property(test_prop_1).add_property(test_prop_2).add_property(test_prop_3) + test_rt_2 = db.RecordType(name="Conference") + test_prop_4 = db.Property(name="attendees", datatype=db.LIST(test_rt_0)) + test_rt_2.add_property(test_prop_4) + # Setup data + test_person_0 = db.Record(name="Person 0").add_parent(test_rt_0).add_property(test_prop_0, value="Their Name") + test_person_1 = db.Record(name="Person 1").add_parent(test_rt_0).add_property(test_prop_0, value="Also Name") + test_person_2 = db.Record(name="Person 2").add_parent(test_rt_0).add_property(test_prop_0, value="Third Name") + test_observation_0 = (db.Record().add_parent(test_rt_1).add_property(test_prop_1, value="2025-01-01") + .add_property(test_prop_2, value=5).add_property(test_prop_3, value=test_person_1)) + test_observation_1 = (db.Record().add_parent(test_rt_1).add_property(test_prop_1, value="2025-02-02") + .add_property(test_prop_2, value=3).add_property(test_prop_3, value=test_person_0)) + test_observation_2 = (db.Record().add_parent(test_rt_1).add_property(test_prop_1, value="2025-03-03") + .add_property(test_prop_2, value=12).add_property(test_prop_3, value=test_person_0)) + test_observation_3 = (db.Record().add_parent(test_rt_1).add_property(test_prop_1, value="2025-04-04") + .add_property(test_prop_2, value=0).add_property(test_prop_3, 
value=test_person_2)) + test_conference_0 = (db.Record(description="Only for Also").add_parent(test_rt_2) + .add_property(test_prop_4, value=[test_person_1])) + test_conference_1 = (db.Record(name="Official Conf", description="For everyone").add_parent(test_rt_2) + .add_property(test_prop_4, value=[test_person_0, test_person_1, test_person_2])) + testdata = [test_rt_0, test_rt_1, test_rt_2, test_prop_0, test_prop_1, test_prop_2, test_prop_3, test_prop_4, + test_person_0, test_person_1, test_person_2, test_observation_0, test_observation_1, + test_observation_2, test_observation_3, test_conference_0, test_conference_1] + # Insert + c = db.Container() + c.extend(testdata) + c.insert() + except Exception as setup_exc: + print(setup_exc) + + +def teardown_function(function): + """Delete created test data""" + try: + db.execute_query("FIND ENTITY WITH ID > 99").delete() + except Exception as delete_exc: + print(delete_exc) + + +def test_successful_export(): + records = next(db.execute_query("FIND Record", page_length=50)) + tmp_path = Path('temp_test_successful_export.xlsx') + assert not tmp_path.exists() + try: + export_import_xlsx.export_container_to_xlsx(records=records, + xlsx_data_filepath=tmp_path) + assert tmp_path.is_file() + finally: + if tmp_path.exists(): + tmp_path.unlink() -- GitLab From f71dced3cd099f190aeac115f1d3477af783f985 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Tue, 8 Apr 2025 15:01:55 +0200 Subject: [PATCH 10/36] REFACTOR: Using new `plain_json` feature of high level api. 
--- .../table_json_conversion/export_import_xlsx.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py index ea18a374..99097b4a 100644 --- a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py @@ -107,9 +107,7 @@ def _generate_jsondata_from_records(records: Container, warnings.warn(f"Data for record with id {record_obj.id} might be " f"incomplete, unsuccessful retrieve.") # Get json representation & adjust layout for compatibility - raw_data = record_obj.serialize() - raw_data.update(raw_data.get('properties', {})) - raw_data.pop('properties') + raw_data = record_obj.serialize(plain_json=True) if record.parents[0].name not in json_data: json_data[record.parents[0].name] = [] json_data[record.parents[0].name].append(raw_data) @@ -149,8 +147,7 @@ def export_container_to_xlsx(records: Container, jsonschema_filepath: Union[str, Path] = None, jsondata_filepath: Union[str, Path] = None, xlsx_template_filepath: Union[str, Path] = None): - """ - Export the data of the given records to an xlsx file. + """Export the data of the given records to an xlsx file. Parameters ---------- @@ -171,6 +168,13 @@ def export_container_to_xlsx(records: Container, xlsx_template_filepath : str, Path If given, write the xlsx template to this file. Optional, default None + + Limitations + ----------- + + This function drops any versioning information from versioned references, references are reduced + to unversioned references. + """ # Ensure every record is only handled once by using id as key. 
entity_ids = {record.id for record in records} -- GitLab From 9307d8fed5c9c558f975d96213b12c85e3743472 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Tue, 8 Apr 2025 15:05:57 +0200 Subject: [PATCH 11/36] REFACTOR: Made validation_utils module private. --- .../{validation_utils.py => _validation_utils.py} | 4 ++-- src/caosadvancedtools/table_json_conversion/convert.py | 6 +++--- src/caosadvancedtools/table_json_conversion/fill_xlsx.py | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) rename src/caosadvancedtools/table_json_conversion/{validation_utils.py => _validation_utils.py} (97%) diff --git a/src/caosadvancedtools/table_json_conversion/validation_utils.py b/src/caosadvancedtools/table_json_conversion/_validation_utils.py similarity index 97% rename from src/caosadvancedtools/table_json_conversion/validation_utils.py rename to src/caosadvancedtools/table_json_conversion/_validation_utils.py index f1e77f48..5dfd171e 100644 --- a/src/caosadvancedtools/table_json_conversion/validation_utils.py +++ b/src/caosadvancedtools/table_json_conversion/_validation_utils.py @@ -30,8 +30,8 @@ from typing import Union import jsonschema -def _validate_jsonschema(instance: Union[dict, int, str, bool], - schema: Union[str, dict]): +def validate_jsonschema(instance: Union[dict, int, str, bool], + schema: Union[str, dict]): """ A table_json_conversion compatible variant of jsonschema.validate(). Accepts instances with datetime instances and None in not-nullable entries. 
diff --git a/src/caosadvancedtools/table_json_conversion/convert.py b/src/caosadvancedtools/table_json_conversion/convert.py index 33432b85..dc2126d7 100644 --- a/src/caosadvancedtools/table_json_conversion/convert.py +++ b/src/caosadvancedtools/table_json_conversion/convert.py @@ -36,7 +36,7 @@ import jsonschema from openpyxl import load_workbook from openpyxl.worksheet.worksheet import Worksheet -from .validation_utils import _validate_jsonschema +from ._validation_utils import validate_jsonschema from caosadvancedtools.table_json_conversion import xlsx_utils from caosadvancedtools.table_json_conversion.fill_xlsx import read_or_dict @@ -329,7 +329,7 @@ class XLSXConverter: for e in exceptions]) raise jsonschema.ValidationError(mess) if validate: - _validate_jsonschema(self._result, self._schema) + validate_jsonschema(self._result, self._schema) if self._errors: raise RuntimeError("There were error while handling the XLSX file.") return self._result @@ -564,7 +564,7 @@ class XLSXConverter: value = False if value == 1 or isinstance(value, str) and '=true()' == value.lower(): value = True - _validate_jsonschema(value, subschema) + validate_jsonschema(value, subschema) # Finally: convert to target type return self.PARSER[subschema.get("type", "string")](value) diff --git a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py index 1f39f66d..e6268fd6 100644 --- a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py @@ -33,7 +33,7 @@ from jsonschema.exceptions import ValidationError from openpyxl import load_workbook, Workbook from openpyxl.cell.cell import ILLEGAL_CHARACTERS_RE -from .validation_utils import _validate_jsonschema +from ._validation_utils import validate_jsonschema from .xlsx_utils import ( array_schema_from_model_schema, get_foreign_key_columns, @@ -360,7 +360,7 @@ validation_schema: dict, optional if 
list(validation_schema['properties'].values())[0]["type"] != "array": validation_schema = array_schema_from_model_schema(read_or_dict(validation_schema)) try: - _validate_jsonschema(data, validation_schema) + validate_jsonschema(data, validation_schema) except ValidationError as verr: print(verr.message) raise verr -- GitLab From b3d60be7a70763348b12448b0cbdcdde0619ac06 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Tue, 8 Apr 2025 15:14:00 +0200 Subject: [PATCH 12/36] REFACTOR: Linting, PEPping, styling, docs. --- src/caosadvancedtools/json_schema_exporter.py | 7 ++++--- .../table_json_conversion/export_import_xlsx.py | 10 +++++----- .../table_json_conversion/fill_xlsx.py | 5 ++++- 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/src/caosadvancedtools/json_schema_exporter.py b/src/caosadvancedtools/json_schema_exporter.py index bce3102e..fcccd298 100644 --- a/src/caosadvancedtools/json_schema_exporter.py +++ b/src/caosadvancedtools/json_schema_exporter.py @@ -124,8 +124,8 @@ class JsonSchemaExporter: that the exporter may fail if this option is activated and the data model is not self-sufficient. use_rt_pool : models.data_model.DataModel, optional - If given, do not attempt to retrieve RecordType information remotely but from this parameter - instead. + If given, do not attempt to retrieve RecordType information remotely but from this + parameter instead. multiple_choice : list[str], optional A list of reference Property names which shall be denoted as multiple choice properties. This means that each option in this property may be selected at most once. 
This is not @@ -341,7 +341,8 @@ ui_schema : dict rt = db.Entity() if isinstance(rt, str): - raise NotImplementedError("Behavior is not implemented when _no_remote == True and datatype is given as a string.") + raise NotImplementedError("Behavior is not implemented when _no_remote == " + "True and datatype is given as a string.") subschema, ui_schema = self._make_segment_from_recordtype(rt) if prop.is_reference(): diff --git a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py index 99097b4a..638b2c32 100644 --- a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py @@ -25,7 +25,7 @@ import json import tempfile import warnings import logging -from typing import Union +from typing import Optional, Union from pathlib import Path import linkahead @@ -44,7 +44,7 @@ logging.disable(logging.NOTSET) def _generate_jsonschema_from_recordtypes(recordtypes: list, - out_path: Union[str, Path] = None) -> dict: + out_path: Optional[Union[str, Path]] = None) -> dict: """ Generate a combined jsonschema for all given recordtypes. @@ -52,7 +52,7 @@ def _generate_jsonschema_from_recordtypes(recordtypes: list, ---------- recordtypes : Iterable List of RecordType entities for which a schema should be generated. - out_path : str, Path + out_path : str or Path, optional If given, the resulting jsonschema will also be written to the file given by out_path. Optional, default None @@ -78,7 +78,7 @@ def _generate_jsonschema_from_recordtypes(recordtypes: list, def _generate_jsondata_from_records(records: Container, - out_path: Union[str, Path] = None) -> dict: + out_path: Optional[Union[str, Path]] = None) -> dict: """ Extract relevant information (id, name, properties, etc.) from the given records and converts this information to json. 
@@ -87,7 +87,7 @@ def _generate_jsondata_from_records(records: Container, ---------- records : Iterable List of Record entities from which the data will be converted to json. - out_path : str, Path + out_path : str or Path, optional If given, the resulting jsondata will also be written to the file given by out_path. Optional, default None diff --git a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py index e6268fd6..b92adc10 100644 --- a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py @@ -331,7 +331,7 @@ to_insert: Optional[dict[str, str]] def fill_template(data: Union[dict, str, TextIO], template: str, result: str, - validation_schema: Union[dict, str, TextIO] = None) -> None: + validation_schema: Optional[Union[dict, str, TextIO]] = None) -> None: """Insert json data into an xlsx file, according to a template. This function fills the json data into the template stored at ``template`` and stores the result as @@ -355,6 +355,9 @@ validation_schema: dict, optional # Validation if validation_schema is not None: + validation_schema = read_or_dict(validation_schema) + assert isinstance(validation_schema, dict) + # convert to array_schema if given schema is a model_schema if 'properties' in validation_schema and validation_schema['properties'].values(): if list(validation_schema['properties'].values())[0]["type"] != "array": -- GitLab From 3104a05c5442311c96512b943c6659b676137d1c Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Wed, 9 Apr 2025 16:04:04 +0200 Subject: [PATCH 13/36] ENH: XLSX export works now with IDs. 
--- .../data/multiple_refs_template.xlsx | Bin 0 -> 8448 bytes integrationtests/test_ex_import_xlsx.py | 141 +++++++++++++++++- src/caosadvancedtools/json_schema_exporter.py | 64 +++++--- .../export_import_xlsx.py | 50 +++++-- .../table_json_conversion/table_generator.py | 41 +++-- 5 files changed, 249 insertions(+), 47 deletions(-) create mode 100644 integrationtests/data/multiple_refs_template.xlsx diff --git a/integrationtests/data/multiple_refs_template.xlsx b/integrationtests/data/multiple_refs_template.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..70a60534144af8f115f6e1030eb6066d168aabd1 GIT binary patch literal 8448 zcmWIWW@Zs#U|`^2u*vU>ay#zpF_nRV;Vc6Kg8%~qLrQ*fKv8}{v3_DffnG&!j_-M2 z?n4GVY!CJam2IEdCAWmn#kOnSp7VO8t=lwnTYkNN%VM8bS$RTwkzS{g+O`?4AN8K( z8XFzwb-1Sd%kK24g<T7uRIf9<+t{;Fihqf`9P{Btxz`(74S6iiJlekD<rO`*^wRUw zoXR&@1*#o>6k%euQC)uZ9T_fx=T)k9-NN3M)3sUucL$#q4W9SxRQ{h7*8S|e#8JIq zxcuXX_Y4dS+Dr@#g2-M-&M!&@`QhxOK<~o_0>}4vJ=wc=S+shKo_?1<R}=Hm?Gx^7 zOPJjDWXav_+k2ziU$B2LEdT%KU3tM;KBw=3^ONNbCLJv?-gQoW;o)hrolOiLzdHPu z7J2<xx%2(vU=!QNj7^v1Uo`}TUuBb-WYi^c=75rpi(2&J*1i=h`qTGZ<hH9hrgyl9 z?ahUS%USk3Iq<~BaL%!Ht=b7~f^O6Iac77nt^3z`RyN`JlNoO&NwqDM39~eRd*I>Q zmmgKvM@P)}sO-JGK81BIQ{K(gPp@CStNVT+CHAUs;9Bl|_X_o9MPJOhmwsKoE<EwN z^zQBdZ~Rcwi2hM3{)rnkbY{lPQ5IlhV3?!Mz#z=Pz)+E+Uy_lUn+hQfLE-Z@D!TZV zg-G4|^%pAAzvXftGi%(oMYuS#?Dm$KEpPqgTLmL}b1G9Nw$$%;HxoELjrZ*&-(9n9 zSae?gKIiKG{Dnp3=XZKZFWNe$e5y#esek+0?(O>b9)ugI2s#=_E?@uQ0M}BVxUEr{ z42LahPYMeizSGpr;awqe`dZ9`XeoW=Yp=H4z8lt(nX^#BJtd(wPA9|RLP%g#e!Rk# zr}D->L{EqGrY>d_4tG@B*|Fu*&zz%wZr!s>J^#{5)~GShKsrL9{#^d@Cw@^K4?_+e zIgpiZAC`Um;w6uUrjxZ(Sa*4TTYOoR!-&_~YHq~2vYKy4-p=nib*scASL(szj};c% zPG9OSkYRZcxO?8AxnFYjFLC7DvN-cX%8hTg)=gZtsyEpG^7q>NuV-h>;p&a{znoou z_i|t7v9%W}c=vtuZ$G@Q^VsSOC9gBp)c>x_Uh+~b!^!RO>nb79txp{;D=<V9lwCV> ziFsO_=C9HzQa5FH-*t@n5b@5kQ}Exhqb%1lFG)@_aVwfTft8QJ{K!S0fSUD=+qxbc znsg~<!nO07TLYvxW}RsK>fWSy^v?RfU#IUrxx~0?Zmre1>?!q`s~MNS-97vK<UhqP zp3Gl9`MR&uxq>}9oF9KHM<=$c=UEFES;?e3)tvwKZQ@c@qx0{7a=rGmNtX|swjuWX 
z`6u&wij;TX{T5OC&EGvw`TEKOo0YClaZ_=fe?oWH6G4lup1elF_8fEDW@H7(FfHLU zoVKv-$ChJj1Rn_3Zr{go;(l+(_tp8cY{M5uc;0IHA>bzUXio97;)^-I*_XOq{1&?B z&f@72qKwle?whPK)j4{tV(Nj9Yv<U=3JaZ`!p-K`HeE$cXr*PHuZQqI?WMQ=&AIIG zVT!)WFRzLHnVXfDm#Ce6VJvmfZM}49d&VW3s(-9M=a_QIOq7;oN|)a3rr46X`-9wi zmA+YP4+&~J*(FJuNfm7sYur}2vLzu(=-9;yze`&L|3t1aoottI;?3EKrRT0q<P)3} zXYOQ|bgp}X;C{I_3$Iyk7ZpaZ{GAtf*z@3vrqgMEcht3AHO|rL`|f^4-0KOy+H(7s zMkefwXGL=EZ3)YozpCBDZiSo?7wgBJa&}LvD!9z29orMIuBn>w|HWJ8hgSb+I$&9^ zS9^Zx`<-8G7hc+Ku3Vdb@N#$bJ-58*Bfr-f-nwq0r;!|9boRvYkoA)_Z&=Ph@aAuZ z-Mo))f8A7RSwAaMS6OU=cjIP9l^uV&Pkqoj!ucX4T9xy3$mLY?E}?l&(s?g~zj{r* z{&f9LjU6`8PhPHHSYoGh{-;g+^DSq&jh(j_dB55=W#3jc3$HI5&lLE{o#wGlx0xin zt$cy3>F;l`Wlxls8U#;w-XZ0*k2!bO+&fkl^JYxk$#pqsLjK{OFOOD8J+i#Xva@zZ zc<%EX7cc&PelT#u;`cVwWRG`zTBP`HVcU%Jhn?FitK(;+Pdyf3B9fc4Mn%VMd%}+E zA9X|?q$KDrU~gU@$n+^FG{G?X7_;_-MKwM>t}hCnPmhUzd~wp_+sjh7=B?K(m&{cP zvOJVILpXi)se7p#4~EK~4~_n`VPBT*7E!<R|036(n9n2VC&n%0kvv6wnb)GvlEM3) zYkM9Ewb<`!yiI7j#Gc2`ju-j`?+_~6#?NH5bGPNns#L@DWuNZr@^I=zcSH(3@{N6a z_4({M>#CH>eoZewnDFUu!AW=ir`FHozr;GPtNV8M@vpqe)*e1{Dvt^>mbfx0PnBU4 z6E}Ji@NUusHetrIQ%<^kU}{QPCetwE7x&&zJUcZ-&#kqytasIEZu5(~q$(uRwSk9K zQM~B!`}dQx%6qQ=(rL_X2{B5!Dt^kj^PsJ9k@JlZ)z9m0sd2a7eiU+cx70qh1n<LD zd2@Bu|KE<gV!K7$*6s7+nGfVPw-?W@+HuHr&HspNazBmkTNd25y2l*%OjyTxN!`KP zuC4~b)k*tyt@r<_I>&lT^~Xkq*?+FxUh?hk1D={ItD}~tXbakVv)9N^VoUCc{$di} zFOsx&*~YcUZW;1aHbi{pS(s*Vzuk3~l=0Q>HI}_x`Hoi|p4#8O!kh7p*F<I_Yn<WC zZDxy~_;K0X`S+f8!=WR658rn0XGE<)B%9UkVwo8ju8K1-NI)u(^8BLg;*8YPl45-@ z1+GQb&bXcT$UvYiUiB2SKvhz>s6ybm=Myf(z6}-aPz;+qqtxf+zQ4B~2541g9DBN~ zcka)+YMt}>k6!hCyso3<67k-1PMWE;P}1iY7B`KQJSR-O(YEB%*{+W&0-H`=_uKYR zg`-%bbMEb;osU!)ecx7@zAvuadup4~GR?xBQ*$S)JorD)<4l?9?_+jl6(zEqS07$k zJv+g-Mroe8-gMpPON1`9ERjrIdGzF}H1j;Uzg=DDnd<^Kojx~Dck0Qejp0{cw43f; zqvn#dNyJNW!vA|7laFi^nsoU`^@T0|&RQS-{9JMGWbyQ!I!!$$k$+~lDXqF`pR291 zIzgJd|83Qi3Fn(HMQ^e={Xa^O*Ll*j&iCJLoSmDiyM1QDNk{j6UN7uaIPdX$KYFA3 z>0@*w$E(nDWn$+S+nGF-Smpmz`S`MxQN{5EH~m$0UY&jUBK6<#+;CY%)zJByZ4CSc 
z9-B)qZd<N6D`Qcs+O3R5&1$<c7PYJW%1CNZYs*M#Q9G8A)TFj7<B+ScvB|@Qobt{V zo96hN)~;UtGTPy6?wRUcRc()+$5a{nUf<PamJr3}bGzZCgh4Fxvh)LAcxN;+8k;1z zv7NcmFjK-HnE9A#LK<67`hg!7&w|h1f0^}<FY<GM4R7gHKG)@rP9;<Mj<z>mIJ$k| z58eJ!R=(Es+q@6+O_F26AGw8`?|3C0$lUQr<+j1k;xfV4Vfh(ty=OLTlr;NU<r<eE zCFs|>?$3VL-S-!zvkL8NsJ?%=Hu=xw^gFL@9?LzKl+}oPEg7pJ^`S>jWdE&WAGsO6 zDtgp41kEkw39#guXxZaaB#~S^V?ogjrQ!nLq8SU9JyBf#M0xp_%;)xVw*6*aaOu6@ zvU%T}#g)FatBCN*P71Io@o@ThK#{Yxeny;U{0>%!6>Z!L#82;J;14r9GEZ`;-NQsd zp;bSW-W+|Uuf$+=@cQdg8|g1A{RNpO#4D)iMR_!~?wHfl9@wykZE@g{IxY3zD-+%> z=M?<h5s?+qy;etYzU`(In=LGx#g0T3DgAZcZnxIE>QqTz)uRu*L6>%3P@R~bzr=Wo z_H)BMo9=x%Rdw&{mv7N0Eq9*|6~FhmVSVPTNW;jJg{PLv7hV4JSM}3A#T#n--^Cgz zMr_*uQ`OO92iJu6;XR$n&UYo|=Y4wpA1zCA%({2ag^7Vdmjh>(Gy*m3=7z=Q-!c&S zs~r~itL>*o&MGGrsY_dfwksvf{$?`siP}b~+cR_Be!tIFpAdNRok-jP`R8+H7bg{S z@;lkB7IIiD9#&||@3r=|bWdm$lX8|q)`K*4@kxTY%QhWYHO*mD+Yv1rIllB6f~T4_ za;}9LaUG2dpK{nsX={4ViR;lTjwH(UG+jP4OKj;jZVRsLpaX1QzE`Q9<tf*aOg$hf zD&u+L`N5=;$!B!-G5%Rl5RjgnGWShO#kR@qIdVL2nH^bIZsR&q^S4!E)hVu-$A1)G z;n_85T7{k6%KN7GdZ)_<3Ut3xKU-qSZ^y3DJm-z}Ua|ISi(fON_J)M4`st#2*62a+ zM`v$oU#2PVri#5;k-SYJSFvZ0jPk*?F2VJlCY?^2d>&>i>Srx$i``u3lY1)nnY8X% zE7KI&qyGK-x9<=0?ASVSOSXk=QC{tw-OEcqeRw*%S?IUM?23;&BCI7&-|Jbz&pg{_ zSHiJ%3CHBNUQMcg&vmr@zE|-9P5qdaF$W^!4tV}5yq9WRm}>lTW-q_Lak!S<m&rOD z%e!uPzw~<M6nAO<cfEz#;ZE283U$?1q&s=b&k|<SYqzV>`4)5fI{z78Yxm&(ww)iI z-P(O`SK^9i;m;3mHZPE8inlCRu-l?9Aph@w<C$F!X?vMJ8j3JvNx!_RIaPbQw|spx z(<Z-(CnlObHeKSB=E1#%dCIK5)r!TDhth2inO$1%A~r=k#4+y`5Bq!{pG~=bZ(X`~ zRW7t%=qsFM-SzUhg6@Gg&z)XeK2o;moxIZ*Hq^=>ywycyA`=6{N^YDf&lsHYrp1F& z-qHPCQ>X7U74e$6s=-BZ$<$q<jJLNeKc!-RWR<Y-?VfkH-&--VO}cxwvhl{w{f~dl z`Lpw4+zInghKx^fpS*aRonF7Kl~}c<VbZGXjjdegpEykmc^r`Ev~{Y(W+|=88*fYA z9h$UBOTzW*otQqQjhE+y8ccV2dur0D`}_xv)=Au9Io`>7TjZ8c%i;`g^9?M2ORc9{ zaQSOVYA+1(32||<<2h#j{p5iw*IDmNyt#VQ(9k-P`(qlL|4oAzGq{=fR+@2%{I5I6 zVqxx7A!*M)>tLoQ^E`R^tLb0w73ycNN>KVyKJ8_aL=Be*o8{fYy#l}8E#558{}OaC z^shJ9wA6sB()Z18`06XX^vevCEBO#|a@M3t`{Z>vr8}$sYF;f7@!Y7sY2B~Yo!K)i 
zH_OM&4n6jH=`@S;0@oIwJpK9gtK&9(PQ8=T^nCcwG<VP3tMh32lWx|TKa_*TH_hpp zf1I0Juv*wSU+JTwlgYm)b=RNOU0-M>zg{l)-0oN5Li3fva&xY2PgtfqV+X5o_Cb}p zGp=m5(QLNod1AimY~J~PRoz7CC_~q?(#NJbc<bezJR4QiX<t6^#9G~n(feK0)>UOI z1uwVqNd10g_KZs5PX5A&3HP5R+&_PC<?4H{65gy@m%(Xf9`p5Q-{)}lDLWK$bS3^D z=-)k=vz(9h&%FoX$G>GLFMRd>NxS}x?BA3B&HwPTIWc_JF1<VR+E(A1+P3=NVUw5( z`!7k%xg=4s{GpNvL*}n@s~4{h-{xTRqdO!gX`^>eD1+fL!HL4X$GJUP&vb~T@Twfm z3wKWYtZwuAypQnjnWuPHi#(k0*lO-;#(V!aW%A5!6ubTFT@-gOZ^GrXiTb>kL>~N% z;i#3Fq4(j+-2Q(IsLA-=$|*LHpaPT=XEHW{Bx7&^+V;O|>g;=hc3iVp33Re;3VRjC zl5=^T*5o+?tNrHOOxyCm?l@;k$flWxcsu*=f1fkQ{a9Inz^Bls472u#?g<IjzrJhN z=h~@H0s^iIi%zka%o!Qb>%8-GsA}V~tt%h=+i-vW57yMB;%ucknui_Un5L`hs0RLC zwBXZUxg8chdmGES*4)s$uJ)ka(Y1U7+uyxWUNS#(r%e$J&{FeKQk-42`CzP#;jgF< z%?BOl?iId#GOQ!qxWZ?7-2EDX0*@^x9X@_!S9)+q<<R*bwP#*#Rqg%w=kFBTb=Ak? zwuLyV{P3Uls^(SgHxDk!yTyBj+Sx6>8JqnS0wv<2Y0Cnx^~y`#PIxjgW&IY7zRH4C z3B5}e<h!fXO<uIle)+7nMiF+U?5q3dL>xZiV;?blYTM5!?;c~5*9KK}RX6`c=`qD8 zdkKG@y!iROr=9b|yN~PN*O_)AHPU_a@|&j~%_uL^v9f-o$0x73O{3jxinIH+r#YV! z4!^jRukuedV&~?<J%^0<AKJX*kZ}3ung^DfKFX9nd3kB`6}L-E*R8mslPIdYmG@cc zwxgxnc5Hsjd^KA7>g~J`<>~jlO5X)d-@+g*KHcti@xMm~NA-l~S@py}Fw9fzk(Dg? 
ze#rLBwjCE1zS8~jNa+2|>$6tRnY+;YZPJTN@hLBi{`Nmyl4|kt%!Q``3`=%;+?nHW z@5X^0e`d>cyqw(8lTj1zsFdQhMIl;&^PYvtl5NxeY&O0Wqp~k(XYx`d8KX6q-`6^M zY_gm)yHPB3FWWY!$BK96tf-jJn8{)>pL3x<bFt8N^CchjQIq5R^(@k%ObiUNxRawP zS;_I^@!#_ve?D=8kI!k}DjA2#+nc6my?PoIUcPVrBpI$%Rg0R0(v1|kS8dcT-?vKT z!WJ{pJM!Ot3-4dB=F6c4a?z&@1gg(Qa(a7S*{89hW<R?gUvU0|xolxM)3c3{lH;47 zxl^zw$E}?!Q?!q)erdBTczwPYi;PgNQox@-jT{!{jRlhX`wjP+Ej66C-T&g_U1e)h zy+vJ`{->-wxBOmu|B?w8ZLez=Rx~8rCJXBe#$4U6lAg(x8vA46MqW2L&u?Dck0jWa zb6xY``c>xg$XxN&pW;=S5*~#Lo5KEfcUqpAv68)b^Cg4$?9RtLXRUhX&5twNKdt+~ zb%{$oKTlpPe*fvn`PD}sPtO<gezIxJ;+vOqQVY*mTZ_$=EfjC>7m1$YEPZOB@~xt6 zl^YUEUYZO27g}@YrbXPtneh)JV;*+yuC)6gbF-q)>Ql+f$SmcTLD`$5CYVjQtK0o< z+K$OjnU6+GAHAI?0!oU7kfg|uNQ(E2U`bK%!<IwFr?<_#u<(`g=SS1t-TXdHdoHe| zSZRTj6jy?hV$OO6K_k_u3Az(l;w7YlqCNlJoSPxX7r*k(#vnmH$!9OW+bOEtj5+4( z-o5G`bM(TGmvZw0YW6pnFwHsNa$$Mj&dS^3fj`g&Q?0W0a2J6pcrFG8UPy1RxTG>C zwHTEC?uNx?-v+fOkJ%@9C8a%@$YSNZU_-Cd?+xLqlVlt1Vj}fU*8MF}J?<HG_tMuw zzN43Ht)AC$m0#UG`AI+fx!AIhpzp8$Jc#+;JE5-rLD#o<pW|EIrIJNU>$0OSl}`)4 z#_Z|2X~tQvMJq1+Ok2vnHFVwiZ)q3u=AJLw6lv-fuk+bP>QCURif>L&t=6v;vDQ8m zxvIs+^)nMswQ}#-IsC26H=}>q3(h+(>tgvzV_`3=a;My()!!F6oziSj>Tf%{YVo<k z)!XZYEe`%*J9X^q<&~cWw@jP&tb6^>cFw&Mn;K7V<C}S-?D=V<+rOT_(>oCpsDAOo zr-|=-Qt#Xo(Vu0eeNy`Q%djo%71oyp8Ri<F(x0(l?L`xnva}v<{oT)l?40h`o15P{ zuGY=FC;h^1o>%2@zZ#CEPU)1sus^K(R`{lF|C4ERD-7TLez5ss;(r^7<MKNBM-1Qe zpMCP5`BnG%)sqe9uU>R_O3tP<gY&Qdh{p(5PCQZ8Ji+fpVYxt0;ooINarZ9H-QRBh z>z~bIUfCtpvksKjMgD){!?S9ZpRGgG<CUj>vHP6ZGe>XocU6<e?&n?=zE1y?X7QN0 z<@yOTn@pC+E;_voZ9fkx9k7_#`t#@fPmzm$y6|-8&UBfkk=|LPq}_VuVf#<dVy*v6 zjMuJOq~X10QR&>C1;<{6P5UrC>~In@+cS2XztL+y<~<hv^iL+iwl?2sgU?6LBSuHF zc|CUJ9ayodWV@mLfdji5&hq{6|J^v#tGJ74SJ%<Dy9qKYwRjpMea>e@DqYolIQ<=e znRnNsRhKvtUUGCXIX^Ia#39Cbq(RYDa9{KKeeO^G%vG9R?`K|**6*3#wOi%@0|SFF z69WS`0|P^RQEE=Hz8;92J85U$VFMnA`lA=-8L(ea>@ZIKD8}-WF>De44I$Pp*SY)G z-&$HYfm^cD)<)=#T&BZYizVB-S4#X92+EAQx`WYUn`(dNxrt8euIR{_wqJ5^xt;91 zAxg*2K~FMm``#3><;6?FJ7TWSU21z<-`_F6VZD`4p;M-Zw=v&`vzO|PzJKO1b$#lk 
zJT+RX;_}mfWe+?rE>wyTo8@})@*>%#^|HsmO}PAWeX`P)s^}L#3o}Bv`tBsF_7o;< z`Dfa6lK)K|TKlBOc1}YXBLl-E76t}>NG=48xg_Q1XM^(L(J8n64jb^a#V<X@yyo@| zHN(I>2N@L>VTE^_uf^&Xt&quQo+qas>MD5Y2=8O9kMFPi{G<Ku=Do|WZ~eah=B$)B z*Hwe<r)~ICnZw#A<{$K2_ASNK+hexP7M5LHybRHk{f|F(%`>$st-E$TD$tI{#`)xn zxgCKOjyKxZ*YIqXWs6ztSf?LyOFCetYrB~J0<9|%sS7{;miGVgP&WH>$RzV+0h9GU zG98KdH(|cglg)AMOE$#WXEADCNz!4>UUJ~2%c}LQh3rK;y0^Vqf5G6w(zG3F`*Sbm zCP}_jlHW41=Ee1MM^yhFbBn(oacHUjb}^Z9JE0jIFPxfl17=<p<NE6JXkCn4^d#%N z)$44<)-^_Maq+y6JC)=AytFS4(r*ox2s3Ya$++dE{es>whlkP+Pcj~wtSq?AYF~KH z*2j!D-!R;K<B=j`?!dh2C*!J0srEWHtrbsizlzghyO=6>S~RuLv%k!0w!dHMyKTo} zI=(%4w`bk2iLdI}pU+3Fvs*1a<{oEYV6b6fV32}D5hOVxM-(_|9-ZRL*JL2zw%)0V z!Hv<-@W3hF=*O(HtO5@C&ps#d@!KMu1)bdHZK0oiFMr>A)?{~r`c6aUm&bw(zTI`$ zdEv}A76-3neN7Xw_Ls|y)=jr@4R4;hYO1NzmfK4oT;mB$$xbV%jr!A?v1(Odh1sin zj}OEj_+YS3wfBV54zF<K=S;`)B!z4GYAq(Z2HY!pc<OfCfoo0^)kSjkMXH@Xesk(n zpOT|r@}Y&VUPP&`%%l74oV~xk^nbP4dvxhv;pYqYyB*)oTruDEBNM9k1MdDmt<A{5 z@Sc?cJgpS%oS#>cnpYAZQdy8%91BXNXQ%l3gHq{#udA=Gr1kVON0l?K+rV6}u;rbU zX?)k-i+`){l+9+55)D?r=)LI3x8Hj<F4ejJUCtn@XWNXUq7&;ym_Bp$9Jx}tK}Ivy zVA2l>vqPu%Op_Mq{`tr8$vL42uN7B2!cV7bJPix}a_NJqR{Ca(hGQ00uY{w+#Jt~c zJ*8S0^CstG^CTG#&G0hI2+e+DzmB7kPSQVR3szL$cH~&qkfF=G*XwZPqKizK>lrU6 zb{fmYMy`_Knx4-)eaQ>v6HM9+MpkcK(+{US;&a}Aujg60k+Z|O820BA)x{nZ-6>Sb zEqxTp!L;(B`n(Pu+h4U8{B}1y>d(o0znr_&oL}Uw*}miV(`zp6zi>QB$u3X*(c?TN zyLbOD9&a!Hx^@48GXLI?Zzsg>+VB6x6yVLsB*Kh)R+a$@8W=$g^f_8|-RKjdAUz=5 zz<3zj^eDP^^ch2tRuFDre8`A({t(>&^buo_W)N;@RN?>|0LmiZfn((HV{}u{2f08d zfN(?OR&K1OfQP)$O+g>90GR;74UPJISWN*BU!a?U-d_Zn0KyH8-vqFl0`5Jcn}XiV z2bloE4UMhBSWN*p`q526Z=r!q0O1Bk0Wq*Ci1r$~R`mK4qz!}{7}rXm)TZbf(d#3S zCJ=65oF)mj6BMJc+6i4VdPxb=2*M4FmeOF&ILb|Qa{|zc9FQ3x+`wod!@v-YSpWri Uv$BB{@G|f+gfTNPOpyig0A`lbzyJUM literal 0 HcmV?d00001 diff --git a/integrationtests/test_ex_import_xlsx.py b/integrationtests/test_ex_import_xlsx.py index 89593e06..d5f7b53f 100755 --- a/integrationtests/test_ex_import_xlsx.py +++ b/integrationtests/test_ex_import_xlsx.py @@ -3,6 +3,7 @@ # This file is a part of the 
LinkAhead Project. # # Copyright (C) 2025 Indiscale GmbH <info@indiscale.com> +# Copyright (C) 2025 Daniel Hornung <d.hornung@indiscale.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as @@ -17,18 +18,110 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <https://www.gnu.org/licenses/>. +"""Test export and import to and from XLSX sheets, using the table json conversion package. + +Data is partly reused from the unit tests. +""" + +import json +import os +import sys from pathlib import Path +from openpyxl import load_workbook + import linkahead as db +from caosadvancedtools.models import parser from caosadvancedtools.table_json_conversion import export_import_xlsx +sys.path.append(os.path.join(os.path.dirname(__file__), "..", "unittests", "table_json_conversion")) +from utils import compare_workbooks # noqa: E402, pylint: disable=wrong-import-position + + +def rfp(*pathcomponents): + """ + Return full path. + Shorthand convenience function. + """ + return os.path.join(os.path.dirname(__file__), *pathcomponents) + + +def rfp_unittest_data(*pathcomponents): + """ + Return full path, from unittest's `table_json_conversion` data directory. + Shorthand convenience function. + """ + parts = Path(__file__).parts + inttest_index = list(reversed(parts)).index("integrationtests") + if inttest_index >= len(parts): + raise ValueError("Implausible path") + data_path = Path(*parts[:-inttest_index-1]) / "unittests" / "table_json_conversion" / "data" + + return os.path.join(str(data_path), *pathcomponents) + + +def _create_datamodel(modelfile: str): + """Create a data model from a yaml file. + + Parameters + ---------- + modelfile : str + File name of the yaml file withb the data model. 
+ """ + model = parser.parse_model_from_yaml(modelfile) + model.sync_data_model(noquestion=True) + + +def _insert_multiple_refs_data(): + """Insert the data from `multiple_refs_data`. + """ + json_data_file = rfp_unittest_data("multiple_refs_data.json") + with open(json_data_file, encoding="utf-8") as myfile: + json_data = json.load(myfile) + + persons = [] + organizations = [] + for organization_data in json_data["Training"][0]["Organisation"]: + rec_org = db.Record(name=organization_data["name"]).add_parent( + db.RecordType("Organisation")) + rec_org.add_property("Country", organization_data["Country"]) + org_persons = [] + for person_data in organization_data["Person"]: + rec_person = db.Record().add_parent(db.RecordType("Person")) + rec_person.add_property("full_name", person_data["full_name"]) + rec_person.add_property("email", person_data["email"]) + persons.append(rec_person) + org_persons.append(rec_person) + rec_org.add_property("Person", org_persons, datatype="LIST<Person>") + organizations.append(rec_org) + + participants = [] + for participant_data in json_data["Training"][0]["participant"]: + rec_participant = db.Record().add_parent(db.RecordType("Person")) + rec_participant.add_property("full_name", participant_data["full_name"]) + rec_participant.add_property("email", participant_data["email"]) + persons.append(rec_participant) + participants.append(rec_participant) + + rec_training = db.Record().add_parent(db.RecordType("Training")) + rec_training.add_property("participant", participants) + rec_training.add_property("Organisation", organizations, datatype="LIST<Organisation>") + + cont = db.Container() + cont.extend(organizations + persons + [rec_training]) + + cont.insert() + + +# def _fill_data(): + def setup_function(function): "Create needed test data" try: # Setup data structure test_rt_0 = db.RecordType(name="Person", description="An observant human.") - test_prop_0 = db.Property(name="fullname", datatype=db.TEXT) + test_prop_0 = 
db.Property(name="full_name", datatype=db.TEXT) test_rt_0.add_property(test_prop_0) test_rt_1 = db.RecordType(name="ObservationRecord") test_prop_1 = db.Property(name="date", datatype=db.DATETIME) @@ -38,6 +131,7 @@ def setup_function(function): test_rt_2 = db.RecordType(name="Conference") test_prop_4 = db.Property(name="attendees", datatype=db.LIST(test_rt_0)) test_rt_2.add_property(test_prop_4) + # Setup data test_person_0 = db.Record(name="Person 0").add_parent(test_rt_0).add_property(test_prop_0, value="Their Name") test_person_1 = db.Record(name="Person 1").add_parent(test_rt_0).add_property(test_prop_0, value="Also Name") @@ -51,12 +145,13 @@ def setup_function(function): test_observation_3 = (db.Record().add_parent(test_rt_1).add_property(test_prop_1, value="2025-04-04") .add_property(test_prop_2, value=0).add_property(test_prop_3, value=test_person_2)) test_conference_0 = (db.Record(description="Only for Also").add_parent(test_rt_2) - .add_property(test_prop_4, value=[test_person_1])) + .add_property(test_prop_4, value=[test_person_1])) test_conference_1 = (db.Record(name="Official Conf", description="For everyone").add_parent(test_rt_2) - .add_property(test_prop_4, value=[test_person_0, test_person_1, test_person_2])) + .add_property(test_prop_4, value=[test_person_0, test_person_1, test_person_2])) testdata = [test_rt_0, test_rt_1, test_rt_2, test_prop_0, test_prop_1, test_prop_2, test_prop_3, test_prop_4, test_person_0, test_person_1, test_person_2, test_observation_0, test_observation_1, test_observation_2, test_observation_3, test_conference_0, test_conference_1] + # Insert c = db.Container() c.extend(testdata) @@ -84,3 +179,43 @@ def test_successful_export(): finally: if tmp_path.exists(): tmp_path.unlink() + + +def test_export_list_refs(tmpdir): + """Test the export to XLSX of list-valued references. 
+ """ + # Setup database + _create_datamodel(rfp_unittest_data("multiple_refs_model.yml")) + + # Create initial data, from json + _insert_multiple_refs_data() + + # Retrieve and export all Training entities + query_result = db.execute_query("Find Training") + export_import_xlsx.export_container_to_xlsx(records=query_result, + include_referenced_entities=True, + xlsx_data_filepath=tmpdir / "result.xlsx", + jsonschema_filepath=tmpdir / "schema.json", + jsondata_filepath=tmpdir / "data.json", + xlsx_template_filepath=tmpdir / "template.xlsx", + ) + + # Test schema + with open(tmpdir/"schema.json", encoding="utf-8") as schema_f: + schema_generated = json.load(schema_f) + + training = schema_generated.get("properties", {}).get("Training") + assert training + assert len(schema_generated.get("properties", {})) == 1 # All top-level sheets? + for props in (training["properties"], + training["properties"]["trainer"]["items"]["properties"], + training["properties"]["participant"]["items"]["properties"], + training["properties"]["Organisation"]["items"]["properties"], + ): + assert "id" in props.keys() + + template_known_good = load_workbook(rfp("data", "multiple_refs_template.xlsx")) + template_generated = load_workbook(tmpdir / "template.xlsx") + compare_workbooks(template_generated, template_known_good) + + # TODO continue here diff --git a/src/caosadvancedtools/json_schema_exporter.py b/src/caosadvancedtools/json_schema_exporter.py index fcccd298..daa2ebeb 100644 --- a/src/caosadvancedtools/json_schema_exporter.py +++ b/src/caosadvancedtools/json_schema_exporter.py @@ -76,6 +76,7 @@ class JsonSchemaExporter: additional_json_schema: Dict[str, dict] = None, additional_ui_schema: Dict[str, dict] = None, units_in_description: bool = True, + plain_data_model: bool = False, do_not_create: List[str] = None, do_not_retrieve: List[str] = None, no_remote: bool = False, @@ -111,6 +112,12 @@ class JsonSchemaExporter: description of the corresponding schema entry. 
If set to false, an additional `unit` key is added to the schema itself which is purely annotational and ignored, e.g., in validation. Default is True. + plain_data_model: bool, optional + If True, represent references as plain objects, without the option to choose from an + enum list of existing entities. Exception: When the reference looks like it *should be* + an enum, the existing Record entries are given as options. This parameter should be set + to True when one needs a generic representation of the data model. + The default is ``False``. do_not_create : list[str], optional A list of reference Property names, for which there should be no option to create them. Instead, only the choice of existing elements should @@ -161,6 +168,7 @@ class JsonSchemaExporter: self._additional_json_schema = additional_json_schema self._additional_ui_schema = additional_ui_schema self._units_in_description = units_in_description + self._plain_data_model = plain_data_model self._do_not_create = do_not_create self._do_not_retrieve = do_not_retrieve self._no_remote = no_remote @@ -262,24 +270,18 @@ ui_schema : dict if inner_ui_schema: ui_schema["items"] = inner_ui_schema elif prop.is_reference(): - if self._use_id_for_identification: - json_prop["type"] = "object" - json_prop["required"] = [] - json_prop["additionalProperties"] = False - json_prop["title"] = prop.name - if prop.datatype == db.FILE: - json_prop["description"] = "Path to file" - json_prop["properties"] = {"path": {"type": "string"}} - else: - json_prop["properties"] = { - "id": {"oneOf": [{"type": "integer"}, {"type": "string"}]}} - elif prop.datatype == db.REFERENCE: + # We must distinguish between multiple kinds of "reference" properties. + + # Case 1: Plain reference without RecordType + if prop.datatype == db.REFERENCE: # No Record creation since no RT is specified and we don't know what # schema to use, so only enum of all Records and all Files. 
values = self._retrieve_enum_values("RECORD") + self._retrieve_enum_values("FILE") json_prop["enum"] = values if prop.name in self._multiple_choice: json_prop["uniqueItems"] = True + + # Case 2: Files are data-url strings in json schema elif prop.datatype == db.FILE or ( self._wrap_files_in_objects and is_list_datatype(prop.datatype) and @@ -316,11 +318,13 @@ ui_schema : dict else: json_prop["type"] = "string" json_prop["format"] = "data-url" + + # Case 3: Reference property with a type else: prop_name = prop.datatype if isinstance(prop.datatype, db.Entity): prop_name = prop.datatype.name - if prop.name in self._do_not_retrieve: + if prop.name in self._do_not_retrieve or self._plain_data_model: values = [] else: values = self._retrieve_enum_values(f"RECORD '{prop_name}'") @@ -341,8 +345,9 @@ ui_schema : dict rt = db.Entity() if isinstance(rt, str): - raise NotImplementedError("Behavior is not implemented when _no_remote == " - "True and datatype is given as a string.") + raise NotImplementedError("Behavior is not implemented when " + "_no_remote == True and datatype is given as a " + "string.") subschema, ui_schema = self._make_segment_from_recordtype(rt) if prop.is_reference(): @@ -350,6 +355,14 @@ ui_schema : dict subschema["title"] = prop.name if prop.description: subschema["description"] = prop.description + if self._use_id_for_identification: + subschema["properties"]["name"] = { + "type": "string", + "description": "The name of the Record to be created"} + subschema["properties"]["id"] = {"type": "string"} + subschema["properties"].move_to_end("name", last=False) + subschema["properties"].move_to_end("id", last=False) + # {"oneOf": [{"type": "integer"}, {"type": "string"}]} # if inner_ui_schema: # ui_schema = inner_ui_schema @@ -427,9 +440,9 @@ ui_schema : dict vals = [] for val in possible_values: - if self._use_id_for_identification: - vals.append(val.id) - elif val.name: + # if self._use_id_for_identification: + # vals.append(val.id) + if val.name: 
vals.append(f"{val.name}") else: vals.append(f"{val.id}") @@ -472,8 +485,8 @@ ui_schema : dict props = OrderedDict() if self._name_property_for_new_records: props["name"] = self._make_text_property("The name of the Record to be created") - if self._use_id_for_identification: - props["id"] = self._make_text_property("The id of the Record") + # if self._use_id_for_identification: + # props["id"] = self._make_text_property("The id of the Record") if self._description_property_for_new_records: props["description"] = self._make_text_property( "The description of the Record to be created") @@ -489,6 +502,14 @@ ui_schema : dict if inner_ui_schema: ui_schema[prop.name] = inner_ui_schema + if self._use_id_for_identification: + props["name"] = { + "type": "string", + "description": "The name of the Record to be created"} + props["id"] = {"type": "string"} + props.move_to_end("name", last=False) + props.move_to_end("id", last=False) + schema["properties"] = props return schema, ui_schema @@ -789,7 +810,8 @@ data_schema : dict title = schema.get("title", str(i)) sub_schemas[title] = schema if return_data_schema: - data_sub_schemas[title] = {"type": "array", "items": schema} + # data_sub_schemas[title] = {"type": "array", "items": schema} + data_sub_schemas[title] = schema required.append(title) if rjsf_uischemas is not None: if not isinstance(rjsf_uischemas, Sequence): diff --git a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py index 638b2c32..8f16d5ac 100644 --- a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py @@ -28,9 +28,12 @@ import logging from typing import Optional, Union from pathlib import Path -import linkahead -from linkahead.common.models import Container -from linkahead import execute_query +from linkahead import ( + execute_query, + Container, + Entity, + LinkAheadException, + ) from 
..json_schema_exporter import JsonSchemaExporter, merge_schemas from .table_generator import XLSXTemplateGenerator @@ -39,7 +42,7 @@ from .fill_xlsx import fill_template # The high_level_api import would normally warn about the API being # experimental. We know this, so suppress the warning. logging.disable(logging.WARNING) -from linkahead.high_level_api import convert_to_python_object # noqa: E402, pylint: disable=wrong-import-position +from linkahead.high_level_api import convert_to_python_object, query # noqa: E402, pylint: disable=wrong-import-position logging.disable(logging.NOTSET) @@ -64,8 +67,9 @@ def _generate_jsonschema_from_recordtypes(recordtypes: list, """ # Generate schema schema_generator = JsonSchemaExporter(additional_properties=False, - name_property_for_new_records=True, - use_id_for_identification=True) + name_property_for_new_records=False, + use_id_for_identification=True, + plain_data_model=True) schemas = [schema_generator.recordtype_to_json_schema(recordtype) for recordtype in recordtypes] _, data_schema = merge_schemas(schemas, return_data_schema=True) @@ -103,7 +107,7 @@ def _generate_jsondata_from_records(records: Container, record_obj = convert_to_python_object(record) try: record_obj.resolve_references(False, None) - except linkahead.LinkAheadException: + except LinkAheadException: warnings.warn(f"Data for record with id {record_obj.id} might be " f"incomplete, unsuccessful retrieve.") # Get json representation & adjust layout for compatibility @@ -136,9 +140,10 @@ def _generate_xlsx_template_file(schema: dict, The resulting xlsx template will be written to the file at this path. 
""" generator = XLSXTemplateGenerator() - foreign_keys = {name: {"__this__": ['id']} for name in recordtype_names} + # foreign_keys = {name: {"__this__": ["id"]} for name in recordtype_names} + foreign_keys = {} generator.generate(schema=schema, foreign_keys=foreign_keys, - filepath=out_path) + filepath=out_path, use_ids_as_foreign=True) def export_container_to_xlsx(records: Container, @@ -176,6 +181,23 @@ def export_container_to_xlsx(records: Container, to unversioned references. """ + + # JSON schema and JSON data ############################ + + # 1. Generate json schema for all top-level record types + + recordtypes = {record.parents[0].retrieve() for record in records} + # recordtype_ids = {recordtype.id for recordtype in recordtypes} + # recordtypes = [execute_query(f"FIND RECORDTYPE WITH (ID = {rt_id})", + # unique=True) + # for rt_id in recordtype_ids] + recordtype_names = {recordtype.name for recordtype in recordtypes} + # Generate schema and data from the records + json_schema = _generate_jsonschema_from_recordtypes(recordtypes, + jsonschema_filepath) + + # 2. Generate json data for all entities. + # Ensure every record is only handled once by using id as key. 
entity_ids = {record.id for record in records} # If indicated, also get and add the records referenced on the first level @@ -191,14 +213,14 @@ def export_container_to_xlsx(records: Container, for element in ref_list: if isinstance(element, (int, str)): elem_id = element - elif isinstance(element, linkahead.Entity): + elif isinstance(element, Entity): elem_id = element.id else: warnings.warn(f"Cannot handle referenced " f"entity '{prop.value}'") continue entity_ids.add(elem_id) - except linkahead.LinkAheadException as e: + except LinkAheadException as e: warnings.warn(f"Cannot handle referenced entity " f"'{prop.value}' because of error '{e}'") # Retrieve data @@ -222,7 +244,11 @@ def export_container_to_xlsx(records: Container, json_schema = _generate_jsonschema_from_recordtypes(recordtypes, jsonschema_filepath) json_data = _generate_jsondata_from_records(records, jsondata_filepath) - # Generate xlsx template + + # XLSX generation and filling with data ################ + + # 1. Generate xlsx template + # _generate_xlsx_template_file needs a file name, so use NamedTemporaryFile # ToDo: This might not work on windows, if not, fix _generate file handling if xlsx_template_filepath is None: diff --git a/src/caosadvancedtools/table_json_conversion/table_generator.py b/src/caosadvancedtools/table_json_conversion/table_generator.py index b8c50e7d..1032ea67 100644 --- a/src/caosadvancedtools/table_json_conversion/table_generator.py +++ b/src/caosadvancedtools/table_json_conversion/table_generator.py @@ -45,7 +45,8 @@ class TableTemplateGenerator(ABC): pass @abstractmethod - def generate(self, schema: dict, foreign_keys: dict, filepath: str): + def generate(self, schema: dict, foreign_keys: dict, filepath: str, + use_ids_as_foreign: bool = False): """Generate a sheet definition from a given JSON schema. Parameters: @@ -76,9 +77,14 @@ class TableTemplateGenerator(ABC): be distiguished by the "name" and "email" of a "Person" which it references. 
The foreign keys for this example are specified like this: | ``{"Training": {"__this__": [["Person", "name"], ["Person", "email"]]}}`` + + use_ids_as_foreign: bool, optional + If True, use the id (a property named "id") as foreign key, if the key does not exist in + the dict. Default is False. """ - def _generate_sheets_from_schema(self, schema: dict, foreign_keys: Optional[dict] = None + def _generate_sheets_from_schema(self, schema: dict, foreign_keys: Optional[dict] = None, + use_ids_as_foreign: bool = False ) -> dict[str, dict[str, tuple[ColumnType, Optional[str], list]]]: """Generate a sheet definition from a given JSON schema. @@ -91,6 +97,9 @@ class TableTemplateGenerator(ABC): a configuration that defines which attributes shall be used to create additional columns when a list of references exists. See ``foreign_keys`` argument of TableTemplateGenerator.generate. + use_ids_as_foreign: bool, optional + If True, use the id (a property named "id") as foreign key, if the key does not exist in + the dict. Default is False. Returns ------- @@ -122,10 +131,12 @@ class TableTemplateGenerator(ABC): sheets: dict[str, dict[str, tuple[ColumnType, Optional[str], list]]] = {} for rt_name, rt_def in schema["properties"].items(): sheets[rt_name] = self._treat_schema_element(schema=rt_def, sheets=sheets, - path=[rt_name], foreign_keys=foreign_keys) + path=[rt_name], foreign_keys=foreign_keys, + use_ids_as_foreign=use_ids_as_foreign) return sheets - def _get_foreign_keys(self, keys: dict, path: list) -> list[list[str]]: + def _get_foreign_keys(self, keys: dict, path: list, use_ids_as_foreign: bool = False + ) -> list[list[str]]: """Return the foreign keys that are needed at the location to which path points. Returns @@ -137,7 +148,10 @@ foreign_keys: list[list[str]] orig_path = path.copy() while path: if keys is None or path[0] not in keys: - raise ValueError(msg_missing) + if use_ids_as_foreign: # Create entry ad-hoc. TODO: don't modify passed argument? 
+ keys[path[0]] = {'__this__': ['id']} + else: + raise ValueError(msg_missing) keys = keys[path[0]] path = path[1:] if isinstance(keys, dict) and "__this__" in keys: @@ -160,7 +174,7 @@ foreign_keys: list[list[str]] def _treat_schema_element(self, schema: dict, sheets: dict, path: list[str], foreign_keys: Optional[dict] = None, level_in_sheet_name: int = 1, - array_paths: Optional[list] = None + array_paths: Optional[list] = None, use_ids_as_foreign: bool = False, ) -> dict[str, tuple[ColumnType, Optional[str], list]]: """Recursively transform elements from the schema into column definitions. @@ -210,13 +224,15 @@ foreign_keys: list[list[str]] col_def = self._treat_schema_element( schema=items, sheets=sheets, path=path, foreign_keys=foreign_keys, level_in_sheet_name=len(path), - array_paths=array_paths+[path] # since this level is an array extend the list + array_paths=array_paths+[path], # since this level is an array extend the list + use_ids_as_foreign=use_ids_as_foreign, ) if col_def: sheets[sheetname] = col_def # and add the foreign keys that are necessary up to this point for array_path in array_paths: - foreigns = self._get_foreign_keys(foreign_keys, array_path) + foreigns = self._get_foreign_keys(foreign_keys, array_path, + use_ids_as_foreign=use_ids_as_foreign) for foreign in foreigns: internal_key = p2s(array_path + foreign) if internal_key in sheets[sheetname]: @@ -261,7 +277,8 @@ foreign_keys: list[list[str]] for pname in schema["properties"]: col_defs = self._treat_schema_element( schema["properties"][pname], sheets, path+[pname], foreign_keys, - level_in_sheet_name, array_paths=array_paths) + level_in_sheet_name, array_paths=array_paths, + use_ids_as_foreign=use_ids_as_foreign) for k in col_defs: if k in cols: raise ValueError(f"The schema would lead to two columns with the same " @@ -294,7 +311,8 @@ class XLSXTemplateGenerator(TableTemplateGenerator): # def __init__(self): # super().__init__() - def generate(self, schema: dict, foreign_keys: dict, 
filepath: str) -> None: + def generate(self, schema: dict, foreign_keys: dict, filepath: str, + use_ids_as_foreign: bool = False) -> None: """Generate a sheet definition from a given JSON schema. Parameters: @@ -308,7 +326,8 @@ class XLSXTemplateGenerator(TableTemplateGenerator): filepath: str The XLSX file will be stored under this path. """ - sheets = self._generate_sheets_from_schema(schema, foreign_keys) + sheets = self._generate_sheets_from_schema(schema, foreign_keys, + use_ids_as_foreign=use_ids_as_foreign) wb = self._create_workbook_from_sheets_def(sheets) parentpath = pathlib.Path(filepath).parent parentpath.mkdir(parents=True, exist_ok=True) -- GitLab From 0597de9cb5bf3cd0e277a892ce763491c1d2a566 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Mon, 14 Apr 2025 15:34:20 +0200 Subject: [PATCH 14/36] TEST: Testing XLSX export (data) --- integrationtests/data/multiple_refs_data.json | 55 ++++++++++ integrationtests/test_ex_import_xlsx.py | 90 +++++++++++---- .../export_import_xlsx.py | 103 ++++++++---------- unittests/table_json_conversion/utils.py | 40 ++++++- 4 files changed, 210 insertions(+), 78 deletions(-) create mode 100644 integrationtests/data/multiple_refs_data.json diff --git a/integrationtests/data/multiple_refs_data.json b/integrationtests/data/multiple_refs_data.json new file mode 100644 index 00000000..8ddc56e2 --- /dev/null +++ b/integrationtests/data/multiple_refs_data.json @@ -0,0 +1,55 @@ +{ + "Training": [ + { + "name": "Example training with multiple organizations.", + "participant": [ + { + "name": null, + "full_name": "Petra Participant", + "email": "petra@indiscale.com" + }, + { + "name": null, + "full_name": "Peter", + "email": "peter@getlinkahead.com" + } + ], + "date": "2024-03-21 14:12:00+00:00", + "url": "www.indiscale.com", + "Organisation": [ + { + "name": "World Training Organization", + "Country": "US", + "Person": [ + { + "name": null, + "full_name": "Henry Henderson", + "email": "henry@organization.org" 
+ }, + { + "name": null, + "full_name": "Harry Hamburg", + "email": "harry@organization.org" + } + ] + }, + { + "name": "European Training Organisation", + "Country": "UK", + "Person": [ + { + "name": null, + "full_name": "Hermione Harvard", + "email": "hermione@organisation.org.uk" + }, + { + "name": null, + "full_name": "Hazel Harper", + "email": "hazel@organisation.org.uk" + } + ] + } + ] + } + ] +} diff --git a/integrationtests/test_ex_import_xlsx.py b/integrationtests/test_ex_import_xlsx.py index d5f7b53f..0c896397 100755 --- a/integrationtests/test_ex_import_xlsx.py +++ b/integrationtests/test_ex_import_xlsx.py @@ -26,6 +26,7 @@ Data is partly reused from the unit tests. import json import os import sys +from datetime import datetime from pathlib import Path from openpyxl import load_workbook @@ -35,7 +36,11 @@ from caosadvancedtools.models import parser from caosadvancedtools.table_json_conversion import export_import_xlsx sys.path.append(os.path.join(os.path.dirname(__file__), "..", "unittests", "table_json_conversion")) -from utils import compare_workbooks # noqa: E402, pylint: disable=wrong-import-position +from utils import ( # noqa: E402, pylint: disable=wrong-import-position + assert_equal_jsons, + compare_workbooks, + purge_from_json, + ) # noqa: E402 def rfp(*pathcomponents): @@ -103,8 +108,12 @@ def _insert_multiple_refs_data(): persons.append(rec_participant) participants.append(rec_participant) - rec_training = db.Record().add_parent(db.RecordType("Training")) + rec_training = db.Record(name=json_data["Training"][0]["name"] + ).add_parent(db.RecordType("Training")) rec_training.add_property("participant", participants) + rec_training.add_property("date", datetime.fromisoformat(json_data["Training"][0]["date"])) + rec_training.add_property("url", json_data["Training"][0]["url"]) + rec_training.add_property("Organisation", organizations, datatype="LIST<Organisation>") cont = db.Container() @@ -133,24 +142,40 @@ def setup_function(function): 
test_rt_2.add_property(test_prop_4) # Setup data - test_person_0 = db.Record(name="Person 0").add_parent(test_rt_0).add_property(test_prop_0, value="Their Name") - test_person_1 = db.Record(name="Person 1").add_parent(test_rt_0).add_property(test_prop_0, value="Also Name") - test_person_2 = db.Record(name="Person 2").add_parent(test_rt_0).add_property(test_prop_0, value="Third Name") - test_observation_0 = (db.Record().add_parent(test_rt_1).add_property(test_prop_1, value="2025-01-01") - .add_property(test_prop_2, value=5).add_property(test_prop_3, value=test_person_1)) - test_observation_1 = (db.Record().add_parent(test_rt_1).add_property(test_prop_1, value="2025-02-02") - .add_property(test_prop_2, value=3).add_property(test_prop_3, value=test_person_0)) - test_observation_2 = (db.Record().add_parent(test_rt_1).add_property(test_prop_1, value="2025-03-03") - .add_property(test_prop_2, value=12).add_property(test_prop_3, value=test_person_0)) - test_observation_3 = (db.Record().add_parent(test_rt_1).add_property(test_prop_1, value="2025-04-04") - .add_property(test_prop_2, value=0).add_property(test_prop_3, value=test_person_2)) + test_person_0 = db.Record(name="Person 0").add_parent(test_rt_0).add_property( + test_prop_0, value="Their Name") + test_person_1 = db.Record(name="Person 1").add_parent(test_rt_0).add_property( + test_prop_0, value="Also Name") + test_person_2 = db.Record(name="Person 2").add_parent(test_rt_0).add_property( + test_prop_0, value="Third Name") + test_observation_0 = (db.Record().add_parent(test_rt_1) + .add_property(test_prop_1, value="2025-01-01") + .add_property(test_prop_2, value=5) + .add_property(test_prop_3, value=test_person_1)) + test_observation_1 = (db.Record().add_parent(test_rt_1) + .add_property(test_prop_1, value="2025-02-02") + .add_property(test_prop_2, value=3) + .add_property(test_prop_3, value=test_person_0)) + test_observation_2 = (db.Record().add_parent(test_rt_1) + .add_property(test_prop_1, value="2025-03-03") + 
.add_property(test_prop_2, value=12) + .add_property(test_prop_3, value=test_person_0)) + test_observation_3 = (db.Record().add_parent(test_rt_1) + .add_property(test_prop_1, value="2025-04-04") + .add_property(test_prop_2, value=0) + .add_property(test_prop_3, value=test_person_2)) test_conference_0 = (db.Record(description="Only for Also").add_parent(test_rt_2) .add_property(test_prop_4, value=[test_person_1])) - test_conference_1 = (db.Record(name="Official Conf", description="For everyone").add_parent(test_rt_2) - .add_property(test_prop_4, value=[test_person_0, test_person_1, test_person_2])) - testdata = [test_rt_0, test_rt_1, test_rt_2, test_prop_0, test_prop_1, test_prop_2, test_prop_3, test_prop_4, - test_person_0, test_person_1, test_person_2, test_observation_0, test_observation_1, - test_observation_2, test_observation_3, test_conference_0, test_conference_1] + test_conference_1 = (db.Record(name="Official Conf", description="For everyone") + .add_parent(test_rt_2) + .add_property(test_prop_4, + value=[test_person_0, test_person_1, test_person_2])) + testdata = [test_rt_0, test_rt_1, test_rt_2, + test_prop_0, test_prop_1, test_prop_2, + test_prop_3, test_prop_4, + test_person_0, test_person_1, test_person_2, + test_observation_0, test_observation_1, test_observation_2, test_observation_3, + test_conference_0, test_conference_1] # Insert c = db.Container() @@ -214,8 +239,35 @@ def test_export_list_refs(tmpdir): ): assert "id" in props.keys() + # Check: XLSX template template_known_good = load_workbook(rfp("data", "multiple_refs_template.xlsx")) template_generated = load_workbook(tmpdir / "template.xlsx") compare_workbooks(template_generated, template_known_good) - # TODO continue here + # Check: Data json content + with open(rfp("data", "multiple_refs_data.json"), encoding="utf-8") as myfile: + json_known_good = json.load(myfile) + with open(tmpdir / "data.json", encoding="utf-8") as myfile: + json_generated = purge_from_json(json.load(myfile), 
remove_keys=["id"]) + assert_equal_jsons(json_generated, json_known_good) + + # Check: Filled XLSX + filled_generated = load_workbook(tmpdir / "result.xlsx") + # For the moment: just check a few samples + + assert filled_generated.get_sheet_names() == ['Training', + 'Training.Organisation', + 'Training.Organisation.Person', + 'Training.participant', + 'Training.trainer', + ] + sheet_training = filled_generated.get_sheet_by_name("Training") + assert sheet_training.max_row == 7 + assert sheet_training.max_column == 19 + assert sheet_training["D7"].value == datetime(2024, 3, 21, 15, 12) + sheet_top = filled_generated.get_sheet_by_name("Training.Organisation.Person") + assert sheet_top.max_row == 11 + assert sheet_top.max_column == 7 + assert sheet_top["B8"].value == sheet_training["B7"].value # Check foreign key component + assert sheet_top["B8"].value == sheet_top["B11"].value + assert sheet_top["G10"].value == "hermione@organisation.org.uk" diff --git a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py index 8f16d5ac..5e986efd 100644 --- a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py @@ -81,7 +81,7 @@ def _generate_jsonschema_from_recordtypes(recordtypes: list, return data_schema -def _generate_jsondata_from_records(records: Container, +def _generate_jsondata_from_records(records: list, out_path: Optional[Union[str, Path]] = None) -> dict: """ Extract relevant information (id, name, properties, etc.) from the given @@ -90,7 +90,7 @@ def _generate_jsondata_from_records(records: Container, Parameters ---------- records : Iterable - List of Record entities from which the data will be converted to json. + List of high-level API objects from which the data will be converted to json. 
out_path : str or Path, optional If given, the resulting jsondata will also be written to the file given by out_path. @@ -102,24 +102,18 @@ def _generate_jsondata_from_records(records: Container, The given records data in json form. """ json_data = {} - for record in records: - # Convert records to high level api objects - record_obj = convert_to_python_object(record) - try: - record_obj.resolve_references(False, None) - except LinkAheadException: - warnings.warn(f"Data for record with id {record_obj.id} might be " - f"incomplete, unsuccessful retrieve.") - # Get json representation & adjust layout for compatibility + for record_obj in records: raw_data = record_obj.serialize(plain_json=True) - if record.parents[0].name not in json_data: - json_data[record.parents[0].name] = [] - json_data[record.parents[0].name].append(raw_data) + + parent_name = record_obj.get_parents()[0].name # We do not handle multiple inheritance yet. + if parent_name not in json_data: + json_data[parent_name] = [] + json_data[parent_name].append(raw_data) # If indicated, save as json file if out_path is not None: with open(out_path, mode="w", encoding="utf8") as json_file: json.dump(json_data, json_file, ensure_ascii=False, indent=2, default=str) - # Return + return json_data @@ -140,7 +134,6 @@ def _generate_xlsx_template_file(schema: dict, The resulting xlsx template will be written to the file at this path. 
""" generator = XLSXTemplateGenerator() - # foreign_keys = {name: {"__this__": ["id"]} for name in recordtype_names} foreign_keys = {} generator.generate(schema=schema, foreign_keys=foreign_keys, filepath=out_path, use_ids_as_foreign=True) @@ -192,6 +185,7 @@ def export_container_to_xlsx(records: Container, # unique=True) # for rt_id in recordtype_ids] recordtype_names = {recordtype.name for recordtype in recordtypes} + # recordtype_names.add("Sample.Preparation.SourceMaterial") # Generate schema and data from the records json_schema = _generate_jsonschema_from_recordtypes(recordtypes, jsonschema_filepath) @@ -199,51 +193,44 @@ def export_container_to_xlsx(records: Container, # 2. Generate json data for all entities. # Ensure every record is only handled once by using id as key. - entity_ids = {record.id for record in records} + # entity_ids = {record.id for record in records} # If indicated, also get and add the records referenced on the first level # in the given container - if include_referenced_entities: - for record in records: - for prop in record.properties: - if prop.is_reference() and prop.value is not None: - try: - ref_list = prop.value - if not isinstance(ref_list, list): - ref_list = [ref_list] - for element in ref_list: - if isinstance(element, (int, str)): - elem_id = element - elif isinstance(element, Entity): - elem_id = element.id - else: - warnings.warn(f"Cannot handle referenced " - f"entity '{prop.value}'") - continue - entity_ids.add(elem_id) - except LinkAheadException as e: - warnings.warn(f"Cannot handle referenced entity " - f"'{prop.value}' because of error '{e}'") + # if include_referenced_entities: + # for record in records: + # for prop in record.properties: + # if prop.is_reference() and prop.value is not None: + # try: + # ref_list = prop.value + # if not isinstance(ref_list, list): + # ref_list = [ref_list] + # for element in ref_list: + # if isinstance(element, (int, str)): + # elem_id = element + # elif isinstance(element, 
Entity): + # elem_id = element.id + # else: + # warnings.warn(f"Cannot handle referenced " + # f"entity '{prop.value}'") + # continue + # entity_ids.add(elem_id) + # except LinkAheadException as e: + # warnings.warn(f"Cannot handle referenced entity " + # f"'{prop.value}' because of error '{e}'") + # Retrieve data - new_records = [] - for entity_id in entity_ids: - entity_id = str(entity_id).split('@')[0] # Queries cannot handle version - entity = execute_query(f"FIND ENTITY WITH (ID = {entity_id})", unique=True) - # We can currently only handle Entities with a parent, as otherwise we - # do not know which sheet they belong in. - if len(entity.get_parents()) > 0: - new_records.append(entity) - # ToDo: Handle Files and other Entities (e.g. Properties) separately - records = new_records - recordtypes = {record.parents[0] for record in records} - recordtype_ids = {recordtype.id for recordtype in recordtypes} - recordtypes = [execute_query(f"FIND RECORDTYPE WITH (ID = {rt_id})", - unique=True) - for rt_id in recordtype_ids] - recordtype_names = {recordtype.name for recordtype in recordtypes} - # Generate schema and data from the records - json_schema = _generate_jsonschema_from_recordtypes(recordtypes, - jsonschema_filepath) - json_data = _generate_jsondata_from_records(records, jsondata_filepath) + if include_referenced_entities: + new_records = [] + entity_ids = {record.id for record in records} + for entity_id in entity_ids: + entity_id = str(entity_id).split('@')[0] + new_records.extend(query(f"FIND ENTITY WITH (ID = {entity_id})")) + # ToDo: Handle Files and other Entities (e.g. 
Properties) separately + high_level_objs = new_records + else: + high_level_objs = [convert_to_python_object(rec) for rec in records] + + json_data = _generate_jsondata_from_records(high_level_objs, jsondata_filepath) # XLSX generation and filling with data ################ diff --git a/unittests/table_json_conversion/utils.py b/unittests/table_json_conversion/utils.py index ac76fbea..da3d17cf 100644 --- a/unittests/table_json_conversion/utils.py +++ b/unittests/table_json_conversion/utils.py @@ -69,6 +69,44 @@ Raise an assertion exception if they are not equal.""" assert el1 == el2, f"Values at path {this_path} are not equal:\n{el1},\n{el2}" +def purge_from_json(data: Union[dict, list], remove_keys: list[str]) -> Union[dict, list]: + """Remove matching entries from json data. + + + Parameters + ---------- + data : Union[dict, list] + The json data to clean. + + remove_keys : list[str] + Remove all keys that are in this list + + Returns + ------- + out : Union[dict, list] + The cleaned result. + """ + + # Remove only from dicts + if isinstance(data, dict): + keys = set(data.keys()) + for removable in remove_keys: + if removable in keys: + data.pop(removable) + elements = list(data.values()) + else: + if not isinstance(data, list): + raise ValueError("Data must be a dict or list.") + elements = data + + # Recurse for all elements + for element in elements: + if isinstance(element, dict) or isinstance(element, list): + purge_from_json(element, remove_keys=remove_keys) + + return data + + def compare_workbooks(wb1: Workbook, wb2: Workbook, hidden: bool = True): """Compare two workbooks for equal content. 
@@ -101,7 +139,7 @@ hidden: bool, optional ) -def _is_recursively_none(obj: Union[list, dict] = None): +def _is_recursively_none(obj: Union[list, dict, None] = None): """Test if ``obj`` is None or recursively consists only of None-like objects.""" if obj is None: return True -- GitLab From 7b869cb1c7fe512290c3618ea37254a8c6f56cf8 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Tue, 15 Apr 2025 10:31:22 +0200 Subject: [PATCH 15/36] REFACTOR: Linting. --- integrationtests/test_ex_import_xlsx.py | 1 + .../export_import_xlsx.py | 30 ++++++++----------- .../table_json_conversion/fill_xlsx.py | 9 +++--- .../table_json_conversion/table_generator.py | 12 ++++---- 4 files changed, 26 insertions(+), 26 deletions(-) diff --git a/integrationtests/test_ex_import_xlsx.py b/integrationtests/test_ex_import_xlsx.py index 0c896397..ae6d922a 100755 --- a/integrationtests/test_ex_import_xlsx.py +++ b/integrationtests/test_ex_import_xlsx.py @@ -209,6 +209,7 @@ def test_successful_export(): def test_export_list_refs(tmpdir): """Test the export to XLSX of list-valued references. 
""" + tmpdir = Path(tmpdir) # Setup database _create_datamodel(rfp_unittest_data("multiple_refs_model.yml")) diff --git a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py index 5e986efd..cb719824 100644 --- a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py @@ -25,14 +25,12 @@ import json import tempfile import warnings import logging -from typing import Optional, Union +from typing import Any, Iterable, Optional, Union from pathlib import Path from linkahead import ( - execute_query, Container, - Entity, - LinkAheadException, + # LinkAheadException, ) from ..json_schema_exporter import JsonSchemaExporter, merge_schemas @@ -42,11 +40,13 @@ from .fill_xlsx import fill_template # The high_level_api import would normally warn about the API being # experimental. We know this, so suppress the warning. logging.disable(logging.WARNING) -from linkahead.high_level_api import convert_to_python_object, query # noqa: E402, pylint: disable=wrong-import-position +from linkahead.high_level_api import ( # noqa: E402, pylint: disable=wrong-import-position + convert_to_python_object, query + ) logging.disable(logging.NOTSET) -def _generate_jsonschema_from_recordtypes(recordtypes: list, +def _generate_jsonschema_from_recordtypes(recordtypes: Iterable, out_path: Optional[Union[str, Path]] = None) -> dict: """ Generate a combined jsonschema for all given recordtypes. @@ -101,7 +101,7 @@ def _generate_jsondata_from_records(records: list, json_data : dict The given records data in json form. 
""" - json_data = {} + json_data: dict[str, Any] = {} for record_obj in records: raw_data = record_obj.serialize(plain_json=True) @@ -118,7 +118,6 @@ def _generate_jsondata_from_records(records: list, def _generate_xlsx_template_file(schema: dict, - recordtype_names: Union[list, set], out_path: Union[str, Path]): """ Generate an empty XLSX template file for the given schema at the indicated @@ -128,13 +127,11 @@ def _generate_xlsx_template_file(schema: dict, ---------- schema : dict Jsonschema for which an xlsx template should be generated. - recordtype_names : Iterable - List of all RecordType names in the given schema. out_path : str, Path The resulting xlsx template will be written to the file at this path. """ generator = XLSXTemplateGenerator() - foreign_keys = {} + foreign_keys: dict = {} generator.generate(schema=schema, foreign_keys=foreign_keys, filepath=out_path, use_ids_as_foreign=True) @@ -142,9 +139,9 @@ def _generate_xlsx_template_file(schema: dict, def export_container_to_xlsx(records: Container, xlsx_data_filepath: Union[str, Path], include_referenced_entities: bool = False, - jsonschema_filepath: Union[str, Path] = None, - jsondata_filepath: Union[str, Path] = None, - xlsx_template_filepath: Union[str, Path] = None): + jsonschema_filepath: Optional[Union[str, Path]] = None, + jsondata_filepath: Optional[Union[str, Path]] = None, + xlsx_template_filepath: Optional[Union[str, Path]] = None): """Export the data of the given records to an xlsx file. 
Parameters @@ -184,7 +181,7 @@ def export_container_to_xlsx(records: Container, # recordtypes = [execute_query(f"FIND RECORDTYPE WITH (ID = {rt_id})", # unique=True) # for rt_id in recordtype_ids] - recordtype_names = {recordtype.name for recordtype in recordtypes} + # recordtype_names = {recordtype.name for recordtype in recordtypes} # recordtype_names.add("Sample.Preparation.SourceMaterial") # Generate schema and data from the records json_schema = _generate_jsonschema_from_recordtypes(recordtypes, @@ -243,8 +240,7 @@ def export_container_to_xlsx(records: Container, xlsx_template_filepath = xlsx_template_file.name else: xlsx_template_file = None - _generate_xlsx_template_file(json_schema, recordtype_names, - xlsx_template_filepath) + _generate_xlsx_template_file(json_schema, xlsx_template_filepath) # Fill xlsx file with data with warnings.catch_warnings(): # We have a lot of information in the json data that we do not need diff --git a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py index b92adc10..8c2abaf0 100644 --- a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py @@ -23,8 +23,8 @@ from __future__ import annotations import datetime -import pathlib import warnings +from pathlib import Path from types import SimpleNamespace from typing import Any, Optional, TextIO, Union from warnings import warn @@ -330,7 +330,7 @@ to_insert: Optional[dict[str, str]] return to_insert -def fill_template(data: Union[dict, str, TextIO], template: str, result: str, +def fill_template(data: Union[dict, str, TextIO], template: str, result: Union[str, Path], validation_schema: Optional[Union[dict, str, TextIO]] = None) -> None: """Insert json data into an xlsx file, according to a template. 
@@ -375,6 +375,7 @@ validation_schema: dict, optional template_filler = TemplateFiller(result_wb, graceful=(validation_schema is None)) template_filler.fill_data(data=data) - parentpath = pathlib.Path(result).parent - parentpath.mkdir(parents=True, exist_ok=True) + if isinstance(result, str): + result = Path(result) + result.parent.mkdir(parents=True, exist_ok=True) result_wb.save(result) diff --git a/src/caosadvancedtools/table_json_conversion/table_generator.py b/src/caosadvancedtools/table_json_conversion/table_generator.py index 1032ea67..7726d8ce 100644 --- a/src/caosadvancedtools/table_json_conversion/table_generator.py +++ b/src/caosadvancedtools/table_json_conversion/table_generator.py @@ -26,10 +26,10 @@ This module allows to generate template tables from JSON schemas. from __future__ import annotations -import pathlib import re from abc import ABC, abstractmethod -from typing import Optional +from pathlib import Path +from typing import Optional, Union from openpyxl import Workbook from openpyxl.styles import PatternFill @@ -311,7 +311,7 @@ class XLSXTemplateGenerator(TableTemplateGenerator): # def __init__(self): # super().__init__() - def generate(self, schema: dict, foreign_keys: dict, filepath: str, + def generate(self, schema: dict, foreign_keys: dict, filepath: Union[str, Path], use_ids_as_foreign: bool = False) -> None: """Generate a sheet definition from a given JSON schema. @@ -323,13 +323,15 @@ class XLSXTemplateGenerator(TableTemplateGenerator): A configuration that defines which attributes shall be used to create additional columns when a list of references exists. See ``foreign_keys`` argument of :meth:`TableTemplateGenerator.generate` . - filepath: str + filepath: Union[str, Path] The XLSX file will be stored under this path. 
""" sheets = self._generate_sheets_from_schema(schema, foreign_keys, use_ids_as_foreign=use_ids_as_foreign) wb = self._create_workbook_from_sheets_def(sheets) - parentpath = pathlib.Path(filepath).parent + if isinstance(filepath, str): + filepath = Path(filepath) + parentpath = filepath.parent parentpath.mkdir(parents=True, exist_ok=True) wb.save(filepath) -- GitLab From a869af4113d65243e2ef5b399aef4253063de2b4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Henrik=20tom=20W=C3=B6rden?= <h.tomwoerden@indiscale.com> Date: Tue, 15 Apr 2025 17:16:25 +0200 Subject: [PATCH 16/36] TST: use clear_database from pylib --- .docker/docker-compose.yml | 2 + integrationtests/test_ex_import_xlsx.py | 153 ++++++++++++------------ 2 files changed, 77 insertions(+), 78 deletions(-) diff --git a/.docker/docker-compose.yml b/.docker/docker-compose.yml index 61d455f4..798717ba 100644 --- a/.docker/docker-compose.yml +++ b/.docker/docker-compose.yml @@ -27,6 +27,8 @@ services: - "10080:10080" environment: DEBUG: 1 + CAOSDB_CONFIG__CAOSDB_INTEGRATION_TEST_SUITE_KEY: "_CAOSDB_ADV_TEST_SUITE" + networks: caosnet: diff --git a/integrationtests/test_ex_import_xlsx.py b/integrationtests/test_ex_import_xlsx.py index ae6d922a..9a9a9a06 100755 --- a/integrationtests/test_ex_import_xlsx.py +++ b/integrationtests/test_ex_import_xlsx.py @@ -26,9 +26,12 @@ Data is partly reused from the unit tests. 
import json import os import sys +import pytest from datetime import datetime from pathlib import Path +from linkahead.utils.register_tests import clear_database, set_test_key + from openpyxl import load_workbook import linkahead as db @@ -42,6 +45,7 @@ from utils import ( # noqa: E402, pylint: disable=wrong-import-position purge_from_json, ) # noqa: E402 +set_test_key("_CAOSDB_ADV_TEST_SUITE") def rfp(*pathcomponents): """ @@ -122,76 +126,62 @@ def _insert_multiple_refs_data(): cont.insert() -# def _fill_data(): - - -def setup_function(function): +@pytest.fixture(autouse=True) +def setup(clear_database): "Create needed test data" - try: - # Setup data structure - test_rt_0 = db.RecordType(name="Person", description="An observant human.") - test_prop_0 = db.Property(name="full_name", datatype=db.TEXT) - test_rt_0.add_property(test_prop_0) - test_rt_1 = db.RecordType(name="ObservationRecord") - test_prop_1 = db.Property(name="date", datatype=db.DATETIME) - test_prop_2 = db.Property(name="amount", datatype=db.INTEGER) - test_prop_3 = db.Property(name="observer", datatype=test_rt_0) - test_rt_1.add_property(test_prop_1).add_property(test_prop_2).add_property(test_prop_3) - test_rt_2 = db.RecordType(name="Conference") - test_prop_4 = db.Property(name="attendees", datatype=db.LIST(test_rt_0)) - test_rt_2.add_property(test_prop_4) - - # Setup data - test_person_0 = db.Record(name="Person 0").add_parent(test_rt_0).add_property( - test_prop_0, value="Their Name") - test_person_1 = db.Record(name="Person 1").add_parent(test_rt_0).add_property( - test_prop_0, value="Also Name") - test_person_2 = db.Record(name="Person 2").add_parent(test_rt_0).add_property( - test_prop_0, value="Third Name") - test_observation_0 = (db.Record().add_parent(test_rt_1) - .add_property(test_prop_1, value="2025-01-01") - .add_property(test_prop_2, value=5) - .add_property(test_prop_3, value=test_person_1)) - test_observation_1 = (db.Record().add_parent(test_rt_1) - .add_property(test_prop_1, 
value="2025-02-02") - .add_property(test_prop_2, value=3) - .add_property(test_prop_3, value=test_person_0)) - test_observation_2 = (db.Record().add_parent(test_rt_1) - .add_property(test_prop_1, value="2025-03-03") - .add_property(test_prop_2, value=12) - .add_property(test_prop_3, value=test_person_0)) - test_observation_3 = (db.Record().add_parent(test_rt_1) - .add_property(test_prop_1, value="2025-04-04") - .add_property(test_prop_2, value=0) - .add_property(test_prop_3, value=test_person_2)) - test_conference_0 = (db.Record(description="Only for Also").add_parent(test_rt_2) - .add_property(test_prop_4, value=[test_person_1])) - test_conference_1 = (db.Record(name="Official Conf", description="For everyone") - .add_parent(test_rt_2) - .add_property(test_prop_4, - value=[test_person_0, test_person_1, test_person_2])) - testdata = [test_rt_0, test_rt_1, test_rt_2, - test_prop_0, test_prop_1, test_prop_2, - test_prop_3, test_prop_4, - test_person_0, test_person_1, test_person_2, - test_observation_0, test_observation_1, test_observation_2, test_observation_3, - test_conference_0, test_conference_1] - - # Insert - c = db.Container() - c.extend(testdata) - c.insert() - except Exception as setup_exc: - print(setup_exc) - - -def teardown_function(function): - """Delete created test data""" - try: - db.execute_query("FIND ENTITY WITH ID > 99").delete() - except Exception as delete_exc: - print(delete_exc) - + # Setup data structure + test_rt_0 = db.RecordType(name="Person", description="An observant human.") + test_prop_0 = db.Property(name="full_name", datatype=db.TEXT) + test_rt_0.add_property(test_prop_0) + test_rt_1 = db.RecordType(name="ObservationRecord") + test_prop_1 = db.Property(name="date", datatype=db.DATETIME) + test_prop_2 = db.Property(name="amount", datatype=db.INTEGER) + test_prop_3 = db.Property(name="observer", datatype=test_rt_0) + test_rt_1.add_property(test_prop_1).add_property(test_prop_2).add_property(test_prop_3) + test_rt_2 = 
db.RecordType(name="Conference") + test_prop_4 = db.Property(name="attendees", datatype=db.LIST(test_rt_0)) + test_rt_2.add_property(test_prop_4) + + # Setup data + test_person_0 = db.Record(name="Person 0").add_parent(test_rt_0).add_property( + test_prop_0, value="Their Name") + test_person_1 = db.Record(name="Person 1").add_parent(test_rt_0).add_property( + test_prop_0, value="Also Name") + test_person_2 = db.Record(name="Person 2").add_parent(test_rt_0).add_property( + test_prop_0, value="Third Name") + test_observation_0 = (db.Record().add_parent(test_rt_1) + .add_property(test_prop_1, value="2025-01-01") + .add_property(test_prop_2, value=5) + .add_property(test_prop_3, value=test_person_1)) + test_observation_1 = (db.Record().add_parent(test_rt_1) + .add_property(test_prop_1, value="2025-02-02") + .add_property(test_prop_2, value=3) + .add_property(test_prop_3, value=test_person_0)) + test_observation_2 = (db.Record().add_parent(test_rt_1) + .add_property(test_prop_1, value="2025-03-03") + .add_property(test_prop_2, value=12) + .add_property(test_prop_3, value=test_person_0)) + test_observation_3 = (db.Record().add_parent(test_rt_1) + .add_property(test_prop_1, value="2025-04-04") + .add_property(test_prop_2, value=0) + .add_property(test_prop_3, value=test_person_2)) + test_conference_0 = (db.Record(description="Only for Also").add_parent(test_rt_2) + .add_property(test_prop_4, value=[test_person_1])) + test_conference_1 = (db.Record(name="Official Conf", description="For everyone") + .add_parent(test_rt_2) + .add_property(test_prop_4, + value=[test_person_0, test_person_1, test_person_2])) + testdata = [test_rt_0, test_rt_1, test_rt_2, + test_prop_0, test_prop_1, test_prop_2, + test_prop_3, test_prop_4, + test_person_0, test_person_1, test_person_2, + test_observation_0, test_observation_1, test_observation_2, test_observation_3, + test_conference_0, test_conference_1] + + # Insert + c = db.Container() + c.extend(testdata) + c.insert() def 
test_successful_export(): records = next(db.execute_query("FIND Record", page_length=50)) @@ -208,6 +198,10 @@ def test_successful_export(): def test_export_list_refs(tmpdir): """Test the export to XLSX of list-valued references. + + We retrieve all "Training" Records from LinkAhead and run `export_container_to_xlsx` on the + result. This shall create an XLSX template, a JSON schema, a filled JSON, and a filled XLSX. + All are checked against our expectation. """ tmpdir = Path(tmpdir) # Setup database @@ -230,15 +224,18 @@ def test_export_list_refs(tmpdir): with open(tmpdir/"schema.json", encoding="utf-8") as schema_f: schema_generated = json.load(schema_f) - training = schema_generated.get("properties", {}).get("Training") - assert training - assert len(schema_generated.get("properties", {})) == 1 # All top-level sheets? - for props in (training["properties"], - training["properties"]["trainer"]["items"]["properties"], - training["properties"]["participant"]["items"]["properties"], - training["properties"]["Organisation"]["items"]["properties"], - ): - assert "id" in props.keys() + try: + assert len(schema_generated["properties"]) == 1 # Only 'Training' should be top level + training = schema_generated["properties"]["Training"] + for props in (training["properties"], + training["properties"]["trainer"]["items"]["properties"], + training["properties"]["participant"]["items"]["properties"], + training["properties"]["Organisation"]["items"]["properties"], + ): + assert "id" in props.keys() + except KeyError: + print("It seems the generated JSON schema does not have the expected structure!") + raise # Check: XLSX template template_known_good = load_workbook(rfp("data", "multiple_refs_template.xlsx")) -- GitLab From 177d8d2f0be8c7d7eee281561181cff2fde3dba0 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Tue, 15 Apr 2025 17:23:51 +0200 Subject: [PATCH 17/36] TEST: Try casting before comparison. 
--- integrationtests/test_ex_import_xlsx.py | 11 ++++++----- unittests/table_json_conversion/utils.py | 17 +++++++++++++++-- 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/integrationtests/test_ex_import_xlsx.py b/integrationtests/test_ex_import_xlsx.py index 9a9a9a06..15fc437a 100755 --- a/integrationtests/test_ex_import_xlsx.py +++ b/integrationtests/test_ex_import_xlsx.py @@ -47,6 +47,7 @@ from utils import ( # noqa: E402, pylint: disable=wrong-import-position set_test_key("_CAOSDB_ADV_TEST_SUITE") + def rfp(*pathcomponents): """ Return full path. @@ -166,11 +167,11 @@ def setup(clear_database): .add_property(test_prop_2, value=0) .add_property(test_prop_3, value=test_person_2)) test_conference_0 = (db.Record(description="Only for Also").add_parent(test_rt_2) - .add_property(test_prop_4, value=[test_person_1])) + .add_property(test_prop_4, value=[test_person_1])) test_conference_1 = (db.Record(name="Official Conf", description="For everyone") - .add_parent(test_rt_2) - .add_property(test_prop_4, - value=[test_person_0, test_person_1, test_person_2])) + .add_parent(test_rt_2) + .add_property(test_prop_4, + value=[test_person_0, test_person_1, test_person_2])) testdata = [test_rt_0, test_rt_1, test_rt_2, test_prop_0, test_prop_1, test_prop_2, test_prop_3, test_prop_4, @@ -183,6 +184,7 @@ def setup(clear_database): c.extend(testdata) c.insert() + def test_successful_export(): records = next(db.execute_query("FIND Record", page_length=50)) tmp_path = Path('temp_test_successful_export.xlsx') @@ -252,7 +254,6 @@ def test_export_list_refs(tmpdir): # Check: Filled XLSX filled_generated = load_workbook(tmpdir / "result.xlsx") # For the moment: just check a few samples - assert filled_generated.get_sheet_names() == ['Training', 'Training.Organisation', 'Training.Organisation.Person', diff --git a/unittests/table_json_conversion/utils.py b/unittests/table_json_conversion/utils.py index da3d17cf..846809d8 100644 --- a/unittests/table_json_conversion/utils.py 
+++ b/unittests/table_json_conversion/utils.py @@ -21,6 +21,7 @@ """Utilities for the tests. """ +from datetime import datetime from typing import Iterable, Union from openpyxl import Workbook @@ -48,7 +49,8 @@ Raise an assertion exception if they are not equal.""" assert_equal_jsons(el1, el2, allow_none=allow_none, allow_empty=allow_empty, path=this_path) continue - assert el1 == el2, f"Values at path {this_path} are not equal:\n{el1},\n{el2}" + assert equals_with_casting(el1, el2), ( + f"Values at path {this_path} are not equal:\n{el1},\n{el2}") continue # Case 2: exists only in one collection existing = json1.get(key, json2.get(key)) @@ -66,7 +68,18 @@ Raise an assertion exception if they are not equal.""" assert_equal_jsons(el1, el2, allow_none=allow_none, allow_empty=allow_empty, path=this_path) else: - assert el1 == el2, f"Values at path {this_path} are not equal:\n{el1},\n{el2}" + assert equals_with_casting(el1, el2), ( + f"Values at path {this_path} are not equal:\n{el1},\n{el2}") + + +def equals_with_casting(value1, value2) -> bool: + """Compare two values, return True if equal, False otherwise. Try to cast to clever datatypes. + """ + try: + return datetime.fromisoformat(value1) == datetime.fromisoformat(value2) + except (ValueError, TypeError): + pass + return value1 == value2 def purge_from_json(data: Union[dict, list], remove_keys: list[str]) -> Union[dict, list]: -- GitLab From bdf50f31c18a3cdafd52052357e360119510d4ef Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Tue, 15 Apr 2025 18:46:03 +0200 Subject: [PATCH 18/36] PERF: Using cache and new high-level API conversion. 
--- .../export_import_xlsx.py | 34 ++++++++++++++----- 1 file changed, 25 insertions(+), 9 deletions(-) diff --git a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py index cb719824..816d9168 100644 --- a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py @@ -32,6 +32,7 @@ from linkahead import ( Container, # LinkAheadException, ) +from linkahead.cached import cached_get_entity_by from ..json_schema_exporter import JsonSchemaExporter, merge_schemas from .table_generator import XLSXTemplateGenerator @@ -41,7 +42,8 @@ from .fill_xlsx import fill_template # experimental. We know this, so suppress the warning. logging.disable(logging.WARNING) from linkahead.high_level_api import ( # noqa: E402, pylint: disable=wrong-import-position - convert_to_python_object, query + convert_to_python_object, + # query ) logging.disable(logging.NOTSET) @@ -176,7 +178,20 @@ def export_container_to_xlsx(records: Container, # 1. 
Generate json schema for all top-level record types - recordtypes = {record.parents[0].retrieve() for record in records} + rt_ids = set() + rt_names = set() + recordtypes = set() + for record in records: + parent = record.parents[0] + if parent.id: + rt_ids.add(parent.id) + else: + rt_names.add(parent.name) + for rt_name in rt_names: + rt_ids.add(cached_get_entity_by(name=rt_name).id) + for rt_id in rt_ids: + recordtypes.add(cached_get_entity_by(eid=rt_id)) + # recordtype_ids = {recordtype.id for recordtype in recordtypes} # recordtypes = [execute_query(f"FIND RECORDTYPE WITH (ID = {rt_id})", # unique=True) @@ -217,13 +232,14 @@ def export_container_to_xlsx(records: Container, # Retrieve data if include_referenced_entities: - new_records = [] - entity_ids = {record.id for record in records} - for entity_id in entity_ids: - entity_id = str(entity_id).split('@')[0] - new_records.extend(query(f"FIND ENTITY WITH (ID = {entity_id})")) - # ToDo: Handle Files and other Entities (e.g. Properties) separately - high_level_objs = new_records + # new_records = [] + # entity_ids = {record.id for record in records} + # for entity_id in entity_ids: + # entity_id = str(entity_id).split('@')[0] + # new_records.extend(query(f"FIND ENTITY WITH (ID = {entity_id})")) + # ToDo: Handle Files and other Entities (e.g. Properties) separately + high_level_objs = [convert_to_python_object(rec, resolve_references=True) + for rec in records] else: high_level_objs = [convert_to_python_object(rec) for rec in records] -- GitLab From 099eaf89d7134afe18c12c6bf9cc32f0778b0446 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Tue, 15 Apr 2025 19:13:27 +0200 Subject: [PATCH 19/36] REFACTOR: Simplified code and listened to deprecation warnings. 
--- integrationtests/test_ex_import_xlsx.py | 16 ++++++++-------- .../table_json_conversion/export_import_xlsx.py | 5 ++--- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/integrationtests/test_ex_import_xlsx.py b/integrationtests/test_ex_import_xlsx.py index 15fc437a..61949b91 100755 --- a/integrationtests/test_ex_import_xlsx.py +++ b/integrationtests/test_ex_import_xlsx.py @@ -254,17 +254,17 @@ def test_export_list_refs(tmpdir): # Check: Filled XLSX filled_generated = load_workbook(tmpdir / "result.xlsx") # For the moment: just check a few samples - assert filled_generated.get_sheet_names() == ['Training', - 'Training.Organisation', - 'Training.Organisation.Person', - 'Training.participant', - 'Training.trainer', - ] - sheet_training = filled_generated.get_sheet_by_name("Training") + assert filled_generated.sheetnames == ['Training', + 'Training.Organisation', + 'Training.Organisation.Person', + 'Training.participant', + 'Training.trainer', + ] + sheet_training = filled_generated["Training"] assert sheet_training.max_row == 7 assert sheet_training.max_column == 19 assert sheet_training["D7"].value == datetime(2024, 3, 21, 15, 12) - sheet_top = filled_generated.get_sheet_by_name("Training.Organisation.Person") + sheet_top = filled_generated["Training.Organisation.Person"] assert sheet_top.max_row == 11 assert sheet_top.max_column == 7 assert sheet_top["B8"].value == sheet_training["B7"].value # Check foreign key component diff --git a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py index 816d9168..6683771c 100644 --- a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py @@ -238,10 +238,9 @@ def export_container_to_xlsx(records: Container, # entity_id = str(entity_id).split('@')[0] # new_records.extend(query(f"FIND ENTITY WITH (ID = {entity_id})")) # ToDo: Handle Files and other 
Entities (e.g. Properties) separately - high_level_objs = [convert_to_python_object(rec, resolve_references=True) - for rec in records] + high_level_objs = convert_to_python_object(records, resolve_references=True) else: - high_level_objs = [convert_to_python_object(rec) for rec in records] + high_level_objs = convert_to_python_object(records) json_data = _generate_jsondata_from_records(high_level_objs, jsondata_filepath) -- GitLab From 701387f5c369bdb1d544c2709289af921e1b7138 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Tue, 15 Apr 2025 19:25:49 +0200 Subject: [PATCH 20/36] REFACTOR: Even more simplifications. --- .../export_import_xlsx.py | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py index 6683771c..d85bb9a0 100644 --- a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py @@ -231,17 +231,19 @@ def export_container_to_xlsx(records: Container, # f"'{prop.value}' because of error '{e}'") # Retrieve data - if include_referenced_entities: - # new_records = [] - # entity_ids = {record.id for record in records} - # for entity_id in entity_ids: - # entity_id = str(entity_id).split('@')[0] - # new_records.extend(query(f"FIND ENTITY WITH (ID = {entity_id})")) - # ToDo: Handle Files and other Entities (e.g. Properties) separately - high_level_objs = convert_to_python_object(records, resolve_references=True) - else: - high_level_objs = convert_to_python_object(records) - + # if include_referenced_entities: + # # new_records = [] + # # entity_ids = {record.id for record in records} + # # for entity_id in entity_ids: + # # entity_id = str(entity_id).split('@')[0] + # # new_records.extend(query(f"FIND ENTITY WITH (ID = {entity_id})")) + # # ToDo: Handle Files and other Entities (e.g. 
Properties) separately + # high_level_objs = convert_to_python_object(records, resolve_references=True) + # else: + # high_level_objs = convert_to_python_object(records) + + high_level_objs = convert_to_python_object(records, + resolve_references=include_referenced_entities) json_data = _generate_jsondata_from_records(high_level_objs, jsondata_filepath) # XLSX generation and filling with data ################ -- GitLab From 65108a5276eba27c60f391556a9e93f80509017e Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Tue, 15 Apr 2025 19:39:02 +0200 Subject: [PATCH 21/36] BUILD: Added profile.yaml for running integration tests. --- README_SETUP.md | 13 +- .../other/restore/caosroot.example.tar.gz | Bin 0 -> 169 bytes .../custom/other/restore/restore.dump.sql | 5821 +++++++++++++++++ .../test_profile/paths/extroot/README.md | 2 + integrationtests/test_profile/profile.yaml | 29 + 5 files changed, 5856 insertions(+), 9 deletions(-) create mode 100644 integrationtests/test_profile/custom/other/restore/caosroot.example.tar.gz create mode 100644 integrationtests/test_profile/custom/other/restore/restore.dump.sql create mode 100644 integrationtests/test_profile/paths/extroot/README.md create mode 100644 integrationtests/test_profile/profile.yaml diff --git a/README_SETUP.md b/README_SETUP.md index e52885ff..5dbc2341 100644 --- a/README_SETUP.md +++ b/README_SETUP.md @@ -33,16 +33,11 @@ entries `install_requires` and `extras_require`. ## Run Integration Tests Locally -1. Change directory to `integrationtests/`. -2. Mount `extroot` to the folder that will be used as extroot. E.g. `sudo mount - -o bind extroot ../../caosdb-deploy/profiles/debug/paths/extroot` (or - whatever path the extroot of the empty profile to be used is located at). -3. Start (or restart) an empty (!) CaosDB instance (with the mounted - extroot). The database will be cleared during testing, so it's important to - use an empty instance. 
Make sure your configuration for the python caosdb - module is correct and allows to connect to the server. -4. Run `test.sh`. Note that this may modify content of the +1. Start LinkAhead using the profile in `integrationtests/test_profile/profile.yaml` +2. Change directory to `integrationtests/`. +3. Run `test.sh`. Note that this may modify content of the `integrationtest/extroot/` directory. +4. Alternatively, run single tests: `pyest integrationtest/test_foo.py` ## Code Formatting diff --git a/integrationtests/test_profile/custom/other/restore/caosroot.example.tar.gz b/integrationtests/test_profile/custom/other/restore/caosroot.example.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..5e02a693960c64d3c82401e2de4abd72f7fd5fd1 GIT binary patch literal 169 zcmb2|=3oE==C@bwa~)C;X(*IGYJOs|+;^MT{2~{7a`)E1Q<7+2;mFtYzI^cl@k8YX z_m9tAc}BGN)fTVCdHeQBaC#oi4w~L0{86aq>Mj1+Hf<Z<t~Grgd_8{M`}eh0|ATja zvENc@@$Pl+!C(LEj=B8TJ@f1SF0c55zy2-cVfed!!wdV|-*f7%IadF2*Zh0c^b0%K SIS}H<DPe&{k@^f83=9CLWKWO) literal 0 HcmV?d00001 diff --git a/integrationtests/test_profile/custom/other/restore/restore.dump.sql b/integrationtests/test_profile/custom/other/restore/restore.dump.sql new file mode 100644 index 00000000..d14b75b5 --- /dev/null +++ b/integrationtests/test_profile/custom/other/restore/restore.dump.sql @@ -0,0 +1,5821 @@ +-- MariaDB dump 10.19 Distrib 10.11.6-MariaDB, for debian-linux-gnu (x86_64) +-- +-- Host: sqldb Database: caosdb +-- ------------------------------------------------------ +-- Server version 10.5.25-MariaDB-ubu2004 + +/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; +/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; +/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; +/*!40101 SET NAMES utf8 */; +/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; +/*!40103 SET TIME_ZONE='+00:00' */; +/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; +/*!40014 SET 
@OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; +/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; +/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; + +-- +-- Table structure for table `archive_collection_type` +-- + +DROP TABLE IF EXISTS `archive_collection_type`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_collection_type` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `collection` varchar(255) NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + UNIQUE KEY `archive_collection_type-d-e-p-v` (`domain_id`,`entity_id`,`property_id`,`_iversion`), + KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`), + KEY `domain_id_2` (`domain_id`,`_iversion`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + CONSTRAINT `archive_collection_type_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_collection_type_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_collection_type_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_collection_type` +-- + +LOCK TABLES `archive_collection_type` WRITE; +/*!40000 ALTER TABLE `archive_collection_type` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_collection_type` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_data_type` +-- + +DROP TABLE IF EXISTS `archive_data_type`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_data_type` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) 
unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `datatype` int(10) unsigned NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + UNIQUE KEY `archive_data_type-d-e-p-v` (`domain_id`,`entity_id`,`property_id`,`_iversion`), + KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`), + KEY `domain_id_2` (`domain_id`,`_iversion`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + KEY `datatype` (`datatype`), + CONSTRAINT `archive_data_type_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_data_type_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_data_type_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_data_type_ibfk_4` FOREIGN KEY (`datatype`) REFERENCES `entities` (`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_data_type` +-- + +LOCK TABLES `archive_data_type` WRITE; +/*!40000 ALTER TABLE `archive_data_type` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_data_type` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_date_data` +-- + +DROP TABLE IF EXISTS `archive_date_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_date_data` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `value` int(11) NOT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') NOT NULL, + `pidx` int(10) unsigned NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`), + KEY `domain_id_2` (`domain_id`,`_iversion`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + CONSTRAINT 
`archive_date_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_date_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_date_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_date_data` +-- + +LOCK TABLES `archive_date_data` WRITE; +/*!40000 ALTER TABLE `archive_date_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_date_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_datetime_data` +-- + +DROP TABLE IF EXISTS `archive_datetime_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_datetime_data` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `value` bigint(20) NOT NULL, + `value_ns` int(10) unsigned DEFAULT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') NOT NULL, + `pidx` int(10) unsigned NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`), + KEY `domain_id_2` (`domain_id`,`_iversion`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + CONSTRAINT `archive_datetime_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_datetime_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_datetime_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_datetime_data` 
+-- + +LOCK TABLES `archive_datetime_data` WRITE; +/*!40000 ALTER TABLE `archive_datetime_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_datetime_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_desc_overrides` +-- + +DROP TABLE IF EXISTS `archive_desc_overrides`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_desc_overrides` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `description` text NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + UNIQUE KEY `archive_desc_overrides-d-e-p-v` (`domain_id`,`entity_id`,`property_id`,`_iversion`), + KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`), + KEY `domain_id_2` (`domain_id`,`_iversion`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + CONSTRAINT `archive_desc_overrides_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_desc_overrides_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_desc_overrides_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_desc_overrides` +-- + +LOCK TABLES `archive_desc_overrides` WRITE; +/*!40000 ALTER TABLE `archive_desc_overrides` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_desc_overrides` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_double_data` +-- + +DROP TABLE IF EXISTS `archive_double_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_double_data` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + 
`property_id` int(10) unsigned NOT NULL, + `value` double NOT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') NOT NULL, + `pidx` int(10) unsigned NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + `unit_sig` bigint(20) DEFAULT NULL, + KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`), + KEY `domain_id_2` (`domain_id`,`_iversion`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + CONSTRAINT `archive_double_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_double_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_double_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_double_data` +-- + +LOCK TABLES `archive_double_data` WRITE; +/*!40000 ALTER TABLE `archive_double_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_double_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_entities` +-- + +DROP TABLE IF EXISTS `archive_entities`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_entities` ( + `id` int(10) unsigned NOT NULL, + `description` text DEFAULT NULL, + `role` enum('RECORDTYPE','RECORD','FILE','_REPLACEMENT','PROPERTY','DATATYPE','ROLE','QUERYTEMPLATE') NOT NULL, + `acl` int(10) unsigned DEFAULT NULL, + `_iversion` int(10) unsigned NOT NULL, + PRIMARY KEY (`id`,`_iversion`), + KEY `acl` (`acl`), + CONSTRAINT `archive_entities_ibfk_1` FOREIGN KEY (`id`, `_iversion`) REFERENCES `entity_version` (`entity_id`, `_iversion`) ON DELETE CASCADE, + CONSTRAINT `archive_entities_ibfk_2` FOREIGN KEY (`acl`) REFERENCES `entity_acl` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; 
+/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_entities` +-- + +LOCK TABLES `archive_entities` WRITE; +/*!40000 ALTER TABLE `archive_entities` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_entities` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_enum_data` +-- + +DROP TABLE IF EXISTS `archive_enum_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_enum_data` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `value` varbinary(255) NOT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') NOT NULL, + `pidx` int(10) unsigned NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`), + KEY `domain_id_2` (`domain_id`,`_iversion`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + CONSTRAINT `archive_enum_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_enum_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_enum_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_enum_data` +-- + +LOCK TABLES `archive_enum_data` WRITE; +/*!40000 ALTER TABLE `archive_enum_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_enum_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_files` +-- + +DROP TABLE IF EXISTS `archive_files`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_files` ( + `file_id` int(10) unsigned NOT NULL, + `path` text 
NOT NULL, + `size` bigint(20) unsigned NOT NULL, + `hash` binary(64) DEFAULT NULL, + `_iversion` int(10) unsigned NOT NULL, + PRIMARY KEY (`file_id`,`_iversion`), + CONSTRAINT `archive_files_ibfk_1` FOREIGN KEY (`file_id`, `_iversion`) REFERENCES `entity_version` (`entity_id`, `_iversion`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_files` +-- + +LOCK TABLES `archive_files` WRITE; +/*!40000 ALTER TABLE `archive_files` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_files` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_integer_data` +-- + +DROP TABLE IF EXISTS `archive_integer_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_integer_data` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `value` bigint(20) NOT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') NOT NULL, + `pidx` int(10) unsigned NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + `unit_sig` bigint(20) DEFAULT NULL, + KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`), + KEY `domain_id_2` (`domain_id`,`_iversion`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + CONSTRAINT `archive_integer_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_integer_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_integer_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_integer_data` +-- + +LOCK TABLES `archive_integer_data` WRITE; 
+/*!40000 ALTER TABLE `archive_integer_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_integer_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_isa` +-- + +DROP TABLE IF EXISTS `archive_isa`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_isa` ( + `child` int(10) unsigned NOT NULL, + `child_iversion` int(10) unsigned NOT NULL, + `parent` int(10) unsigned NOT NULL, + `direct` tinyint(1) DEFAULT 1, + KEY `parent` (`parent`), + KEY `child` (`child`,`child_iversion`), + CONSTRAINT `archive_isa_ibfk_1` FOREIGN KEY (`parent`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_isa_ibfk_2` FOREIGN KEY (`child`, `child_iversion`) REFERENCES `entity_version` (`entity_id`, `_iversion`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_isa` +-- + +LOCK TABLES `archive_isa` WRITE; +/*!40000 ALTER TABLE `archive_isa` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_isa` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_name_data` +-- + +DROP TABLE IF EXISTS `archive_name_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_name_data` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `value` varchar(255) NOT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') NOT NULL, + `pidx` int(10) unsigned NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`), + KEY `domain_id_2` (`domain_id`,`_iversion`), + KEY `value` (`value`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + CONSTRAINT `archive_name_data_ibfk_1` FOREIGN KEY (`domain_id`) 
REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_name_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_name_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_name_data` +-- + +LOCK TABLES `archive_name_data` WRITE; +/*!40000 ALTER TABLE `archive_name_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_name_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_name_overrides` +-- + +DROP TABLE IF EXISTS `archive_name_overrides`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_name_overrides` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `name` varchar(255) NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + UNIQUE KEY `archive_name_overrides-d-e-p-v` (`domain_id`,`entity_id`,`property_id`,`_iversion`), + KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`), + KEY `domain_id_2` (`domain_id`,`_iversion`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + CONSTRAINT `archive_name_overrides_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_name_overrides_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_name_overrides_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_name_overrides` +-- + +LOCK TABLES `archive_name_overrides` WRITE; +/*!40000 ALTER TABLE `archive_name_overrides` 
DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_name_overrides` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_null_data` +-- + +DROP TABLE IF EXISTS `archive_null_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_null_data` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') NOT NULL, + `pidx` int(10) unsigned NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`), + KEY `domain_id_2` (`domain_id`,`_iversion`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + CONSTRAINT `archive_null_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_null_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_null_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_null_data` +-- + +LOCK TABLES `archive_null_data` WRITE; +/*!40000 ALTER TABLE `archive_null_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_null_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_query_template_def` +-- + +DROP TABLE IF EXISTS `archive_query_template_def`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_query_template_def` ( + `id` int(10) unsigned NOT NULL, + `definition` mediumtext NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + PRIMARY KEY (`id`,`_iversion`), + CONSTRAINT `archive_query_template_def_ibfk_1` FOREIGN KEY (`id`, `_iversion`) REFERENCES 
`entity_version` (`entity_id`, `_iversion`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_query_template_def` +-- + +LOCK TABLES `archive_query_template_def` WRITE; +/*!40000 ALTER TABLE `archive_query_template_def` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_query_template_def` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_reference_data` +-- + +DROP TABLE IF EXISTS `archive_reference_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_reference_data` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `value` int(10) unsigned NOT NULL, + `value_iversion` int(10) unsigned DEFAULT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') NOT NULL, + `pidx` int(10) unsigned NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`), + KEY `domain_id_2` (`domain_id`,`_iversion`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + KEY `value` (`value`), + CONSTRAINT `archive_reference_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_reference_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_reference_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_reference_data_ibfk_4` FOREIGN KEY (`value`) REFERENCES `entities` (`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_reference_data` +-- + +LOCK TABLES `archive_reference_data` WRITE; +/*!40000 
ALTER TABLE `archive_reference_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_reference_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `archive_text_data` +-- + +DROP TABLE IF EXISTS `archive_text_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `archive_text_data` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `value` text NOT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') NOT NULL, + `pidx` int(10) unsigned NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + KEY `domain_id` (`domain_id`,`entity_id`,`_iversion`), + KEY `domain_id_2` (`domain_id`,`_iversion`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + CONSTRAINT `archive_text_data_ibfk_1` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_text_data_ibfk_2` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE, + CONSTRAINT `archive_text_data_ibfk_3` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `archive_text_data` +-- + +LOCK TABLES `archive_text_data` WRITE; +/*!40000 ALTER TABLE `archive_text_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `archive_text_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `collection_type` +-- + +DROP TABLE IF EXISTS `collection_type`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `collection_type` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `collection` varchar(255) NOT NULL, + UNIQUE KEY `collection_type-d-e-p` 
(`domain_id`,`entity_id`,`property_id`), + KEY `domain_id` (`domain_id`,`entity_id`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + CONSTRAINT `collection_type_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`), + CONSTRAINT `collection_type_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`), + CONSTRAINT `collection_type_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `collection_type` +-- + +LOCK TABLES `collection_type` WRITE; +/*!40000 ALTER TABLE `collection_type` DISABLE KEYS */; +/*!40000 ALTER TABLE `collection_type` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `data_type` +-- + +DROP TABLE IF EXISTS `data_type`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `data_type` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `datatype` int(10) unsigned NOT NULL, + UNIQUE KEY `datatype_ukey` (`domain_id`,`entity_id`,`property_id`), + KEY `name_ov_dom_ent_idx` (`domain_id`,`entity_id`), + KEY `datatype_forkey_ent` (`entity_id`), + KEY `datatype_forkey_pro` (`property_id`), + KEY `datatype_forkey_type` (`datatype`), + CONSTRAINT `datatype_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`), + CONSTRAINT `datatype_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`), + CONSTRAINT `datatype_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`), + CONSTRAINT `datatype_forkey_type` FOREIGN KEY (`datatype`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `data_type` +-- + 
+LOCK TABLES `data_type` WRITE; +/*!40000 ALTER TABLE `data_type` DISABLE KEYS */; +INSERT INTO `data_type` VALUES +(0,0,20,14), +(0,0,21,14), +(0,0,24,14); +/*!40000 ALTER TABLE `data_type` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `date_data` +-- + +DROP TABLE IF EXISTS `date_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `date_data` ( + `domain_id` int(10) unsigned DEFAULT NULL, + `entity_id` int(10) unsigned DEFAULT NULL, + `property_id` int(10) unsigned DEFAULT NULL, + `value` int(11) NOT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') DEFAULT NULL, + `pidx` int(10) unsigned NOT NULL DEFAULT 0, + KEY `date_data_dom_ent_idx` (`domain_id`,`entity_id`), + KEY `date_ov_forkey_ent` (`entity_id`), + KEY `date_ov_forkey_pro` (`property_id`), + CONSTRAINT `date_ov_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`), + CONSTRAINT `date_ov_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`), + CONSTRAINT `date_ov_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `date_data` +-- + +LOCK TABLES `date_data` WRITE; +/*!40000 ALTER TABLE `date_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `date_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `datetime_data` +-- + +DROP TABLE IF EXISTS `datetime_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `datetime_data` ( + `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.', + `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.', + `property_id` int(10) unsigned NOT NULL COMMENT 'Property.', + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') NOT NULL COMMENT 'Status of this 
statement.', + `pidx` int(10) unsigned NOT NULL DEFAULT 0, + `value_ns` int(10) unsigned DEFAULT NULL, + `value` bigint(20) NOT NULL, + KEY `domain_id` (`domain_id`,`entity_id`), + KEY `dat_entity_id_entity` (`entity_id`), + KEY `dat_property_id_entity` (`property_id`), + CONSTRAINT `dat_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`), + CONSTRAINT `dat_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`), + CONSTRAINT `dat_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `datetime_data` +-- + +LOCK TABLES `datetime_data` WRITE; +/*!40000 ALTER TABLE `datetime_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `datetime_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `desc_overrides` +-- + +DROP TABLE IF EXISTS `desc_overrides`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `desc_overrides` ( + `domain_id` int(10) unsigned DEFAULT NULL, + `entity_id` int(10) unsigned DEFAULT NULL, + `property_id` int(10) unsigned DEFAULT NULL, + `description` text DEFAULT NULL, + UNIQUE KEY `desc_ov_ukey` (`domain_id`,`entity_id`,`property_id`), + KEY `desc_ov_dom_ent_idx` (`domain_id`,`entity_id`), + KEY `desc_ov_forkey_ent` (`entity_id`), + KEY `desc_ov_forkey_pro` (`property_id`), + CONSTRAINT `desc_ov_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`), + CONSTRAINT `desc_ov_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`), + CONSTRAINT `desc_ov_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `desc_overrides` +-- + +LOCK TABLES `desc_overrides` WRITE; 
+/*!40000 ALTER TABLE `desc_overrides` DISABLE KEYS */; +/*!40000 ALTER TABLE `desc_overrides` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `double_data` +-- + +DROP TABLE IF EXISTS `double_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `double_data` ( + `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.', + `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.', + `property_id` int(10) unsigned NOT NULL COMMENT 'Property.', + `value` double NOT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') NOT NULL COMMENT 'Status of this statement.', + `pidx` int(10) unsigned NOT NULL DEFAULT 0, + `unit_sig` bigint(20) DEFAULT NULL, + KEY `domain_id` (`domain_id`,`entity_id`), + KEY `dou_entity_id_entity` (`entity_id`), + KEY `dou_property_id_entity` (`property_id`), + CONSTRAINT `dou_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`), + CONSTRAINT `dou_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`), + CONSTRAINT `dou_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `double_data` +-- + +LOCK TABLES `double_data` WRITE; +/*!40000 ALTER TABLE `double_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `double_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `entities` +-- + +DROP TABLE IF EXISTS `entities`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `entities` ( + `id` int(10) unsigned NOT NULL AUTO_INCREMENT COMMENT 'Unique identifier.', + `description` text DEFAULT NULL, + `role` enum('RECORDTYPE','RECORD','FILE','_REPLACEMENT','PROPERTY','DATATYPE','ROLE','QUERYTEMPLATE') NOT NULL, + `acl` int(10) unsigned DEFAULT NULL 
COMMENT 'Access Control List for the entity.', + PRIMARY KEY (`id`), + KEY `entity_entity_acl` (`acl`), + CONSTRAINT `entity_entity_acl` FOREIGN KEY (`acl`) REFERENCES `entity_acl` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=100 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `entities` +-- + +LOCK TABLES `entities` WRITE; +/*!40000 ALTER TABLE `entities` DISABLE KEYS */; +INSERT INTO `entities` VALUES +(0,'The default domain.','ROLE',0), +(1,'The default recordtype.','ROLE',0), +(2,'The default record.','ROLE',0), +(3,'The default file.','ROLE',0), +(4,'The default property.','ROLE',0), +(7,'The default datatype.','ROLE',0), +(8,'The QueryTemplate role.','ROLE',0), +(11,'The default reference data type.','DATATYPE',0), +(12,'The default integer data type.','DATATYPE',0), +(13,'The default double data type.','DATATYPE',0), +(14,'The default text data type.','DATATYPE',0), +(15,'The default datetime data type.','DATATYPE',0), +(16,'The default timespan data type.','DATATYPE',0), +(17,'The default file reference data type.','DATATYPE',0), +(18,'The defaulf boolean data type','DATATYPE',0), +(20,'Name of an entity','PROPERTY',0), +(21,'Unit of an entity.','PROPERTY',0), +(24,'Description of an entity.','PROPERTY',0); +/*!40000 ALTER TABLE `entities` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `entity_acl` +-- + +DROP TABLE IF EXISTS `entity_acl`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `entity_acl` ( + `id` int(10) unsigned NOT NULL AUTO_INCREMENT, + `acl` varbinary(65525) NOT NULL, + PRIMARY KEY (`id`), + KEY `entity_acl_acl` (`acl`(3072)) +) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `entity_acl` +-- + +LOCK TABLES `entity_acl` WRITE; +/*!40000 ALTER TABLE 
`entity_acl` DISABLE KEYS */; +INSERT INTO `entity_acl` VALUES +(0,''); +/*!40000 ALTER TABLE `entity_acl` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `entity_ids` +-- + +DROP TABLE IF EXISTS `entity_ids`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `entity_ids` ( + `id` varchar(255) NOT NULL, + `internal_id` int(10) unsigned NOT NULL COMMENT 'Internal ID of an entity. This id is used internally in the *_data tables and elsewhere. This ID is never exposed via the CaosDB API.', + PRIMARY KEY (`id`), + KEY `entity_ids_internal_id` (`internal_id`), + CONSTRAINT `entity_ids_internal_id` FOREIGN KEY (`internal_id`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `entity_ids` +-- + +LOCK TABLES `entity_ids` WRITE; +/*!40000 ALTER TABLE `entity_ids` DISABLE KEYS */; +INSERT INTO `entity_ids` VALUES +('1',1), +('2',2), +('3',3), +('4',4), +('7',7), +('8',8), +('11',11), +('12',12), +('13',13), +('14',14), +('15',15), +('16',16), +('17',17), +('18',18), +('20',20), +('21',21), +('24',24); +/*!40000 ALTER TABLE `entity_ids` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `entity_version` +-- + +DROP TABLE IF EXISTS `entity_version`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `entity_version` ( + `entity_id` int(10) unsigned NOT NULL, + `hash` varbinary(255) DEFAULT NULL, + `version` varbinary(255) NOT NULL, + `_iversion` int(10) unsigned NOT NULL, + `_ipparent` int(10) unsigned DEFAULT NULL, + `srid` varbinary(255) NOT NULL, + PRIMARY KEY (`entity_id`,`_iversion`), + UNIQUE KEY `entity_version-e-v` (`entity_id`,`version`), + KEY `srid` (`srid`), + CONSTRAINT `entity_version_ibfk_1` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`) ON DELETE 
CASCADE, + CONSTRAINT `entity_version_ibfk_2` FOREIGN KEY (`srid`) REFERENCES `transactions` (`srid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `entity_version` +-- + +LOCK TABLES `entity_version` WRITE; +/*!40000 ALTER TABLE `entity_version` DISABLE KEYS */; +INSERT INTO `entity_version` VALUES +(0,NULL,'a22e13cc0a0d6abc63a520b038ffb11f937f2fe5',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(1,NULL,'8206d5a37ccedf6e2101274cacabf613c464963e',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(2,NULL,'5ff5012d83e097bd915bde08d50ef1570b868bba',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(3,NULL,'03f4ee27316f17c90e421cd474cc0004defddf47',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(4,NULL,'8dfef42117275fbc475b607f95b19810f4de2de5',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(7,NULL,'0a283af5f5b162a36744b2043dc6caeabb881e25',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(8,NULL,'8acd25b0ce6fbdb9841bb214f60a06f5dc31bc3f',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(11,NULL,'be63da8ec865803eabcdb6bba4d2156cbaee524a',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), 
+(12,NULL,'87f06890501ab9a4c767c2c662e240e8b9749d01',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(13,NULL,'3c8c0e26528d4f38defcfdcf4bb767327eb4f295',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(14,NULL,'d41b564762eda4fc3405cd45dd6f1e74727b27dc',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(15,NULL,'b060268f9575814bffbeeaec3af4eac32cbc32b3',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(16,NULL,'3639e7d0691aad743b283c340c183e6bbd46ec68',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(17,NULL,'e3e837ac410cd07ede3ca7a7e6072488c4313b96',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(18,NULL,'14b349a86c04ec27ec0035c1cbeb92f700342236',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(20,NULL,'9030ad4b0f91c832ddb4a7c9d9b090c1f035b1d3',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(21,NULL,'6a693e36a2afae1cbc571cce52c1676065c1ed24',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'), +(24,NULL,'d78e2b50954d74d6946d2c113cb7393b3b6146f3',1,NULL,'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'); +/*!40000 ALTER TABLE `entity_version` ENABLE KEYS */; +UNLOCK 
TABLES; + +-- +-- Table structure for table `enum_data` +-- + +DROP TABLE IF EXISTS `enum_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `enum_data` ( + `domain_id` int(10) unsigned DEFAULT NULL, + `entity_id` int(10) unsigned DEFAULT NULL, + `property_id` int(10) unsigned DEFAULT NULL, + `value` varbinary(255) NOT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') DEFAULT NULL, + `pidx` int(10) unsigned NOT NULL DEFAULT 0, + KEY `enum_ov_dom_ent_idx` (`domain_id`,`entity_id`), + KEY `enum_ov_forkey_ent` (`entity_id`), + KEY `enum_ov_forkey_pro` (`property_id`), + CONSTRAINT `enum_ov_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`), + CONSTRAINT `enum_ov_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`), + CONSTRAINT `enum_ov_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `enum_data` +-- + +LOCK TABLES `enum_data` WRITE; +/*!40000 ALTER TABLE `enum_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `enum_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `feature_config` +-- + +DROP TABLE IF EXISTS `feature_config`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `feature_config` ( + `_key` varchar(255) NOT NULL, + `_value` varchar(255) DEFAULT NULL, + PRIMARY KEY (`_key`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `feature_config` +-- + +LOCK TABLES `feature_config` WRITE; +/*!40000 ALTER TABLE `feature_config` DISABLE KEYS */; +INSERT INTO `feature_config` VALUES +('ENTITY_VERSIONING','ENABLED'); +/*!40000 ALTER TABLE `feature_config` ENABLE KEYS */; +UNLOCK 
TABLES; + +-- +-- Table structure for table `files` +-- + +DROP TABLE IF EXISTS `files`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `files` ( + `file_id` int(10) unsigned NOT NULL COMMENT 'The file''s ID.', + `path` varchar(255) NOT NULL COMMENT 'Directory of the file.', + `size` bigint(20) unsigned NOT NULL COMMENT 'Size in kB (oktet bytes).', + `hash` binary(64) DEFAULT NULL, + `checked_timestamp` bigint(20) NOT NULL DEFAULT 0, + PRIMARY KEY (`file_id`), + CONSTRAINT `fil_file_id_entity` FOREIGN KEY (`file_id`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `files` +-- + +LOCK TABLES `files` WRITE; +/*!40000 ALTER TABLE `files` DISABLE KEYS */; +/*!40000 ALTER TABLE `files` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `integer_data` +-- + +DROP TABLE IF EXISTS `integer_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `integer_data` ( + `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.', + `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.', + `property_id` int(10) unsigned NOT NULL COMMENT 'Property.', + `value` bigint(20) NOT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') NOT NULL COMMENT 'Status of this statement.', + `pidx` int(10) unsigned NOT NULL DEFAULT 0, + `unit_sig` bigint(20) DEFAULT NULL, + KEY `domain_id` (`domain_id`,`entity_id`), + KEY `int_entity_id_entity` (`entity_id`), + KEY `int_property_id_entity` (`property_id`), + CONSTRAINT `int_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`), + CONSTRAINT `int_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`), + CONSTRAINT `int_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) +) 
ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `integer_data` +-- + +LOCK TABLES `integer_data` WRITE; +/*!40000 ALTER TABLE `integer_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `integer_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `isa_cache` +-- + +DROP TABLE IF EXISTS `isa_cache`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `isa_cache` ( + `child` int(10) unsigned NOT NULL, + `parent` int(10) unsigned NOT NULL, + `rpath` varchar(255) NOT NULL, + PRIMARY KEY (`child`,`parent`,`rpath`), + KEY `isa_cache_parent_entity` (`parent`), + CONSTRAINT `isa_cache_child_entity` FOREIGN KEY (`child`) REFERENCES `entities` (`id`), + CONSTRAINT `isa_cache_parent_entity` FOREIGN KEY (`parent`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `isa_cache` +-- + +LOCK TABLES `isa_cache` WRITE; +/*!40000 ALTER TABLE `isa_cache` DISABLE KEYS */; +/*!40000 ALTER TABLE `isa_cache` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `name_data` +-- + +DROP TABLE IF EXISTS `name_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `name_data` ( + `domain_id` int(10) unsigned NOT NULL, + `entity_id` int(10) unsigned NOT NULL, + `property_id` int(10) unsigned NOT NULL, + `value` varchar(255) NOT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') NOT NULL, + `pidx` int(10) unsigned NOT NULL DEFAULT 0, + UNIQUE KEY `domain_id_2` (`domain_id`,`entity_id`,`property_id`), + KEY `domain_id` (`domain_id`,`entity_id`), + KEY `entity_id` (`entity_id`), + KEY `property_id` (`property_id`), + KEY `value` (`value`), + CONSTRAINT 
`name_data_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`), + CONSTRAINT `name_data_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`), + CONSTRAINT `name_data_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `name_data` +-- + +LOCK TABLES `name_data` WRITE; +/*!40000 ALTER TABLE `name_data` DISABLE KEYS */; +INSERT INTO `name_data` VALUES +(0,0,20,'DOMAIN','FIX',0), +(0,1,20,'RECORDTYPE','FIX',0), +(0,2,20,'RECORD','FIX',0), +(0,3,20,'FILE','FIX',0), +(0,4,20,'PROPERTY','FIX',0), +(0,7,20,'DATATYPE','FIX',0), +(0,8,20,'QUERYTEMPLATE','FIX',0), +(0,11,20,'REFERENCE','FIX',0), +(0,12,20,'INTEGER','FIX',0), +(0,13,20,'DOUBLE','FIX',0), +(0,14,20,'TEXT','FIX',0), +(0,15,20,'DATETIME','FIX',0), +(0,16,20,'TIMESPAN','FIX',0), +(0,17,20,'FILE','FIX',0), +(0,18,20,'BOOLEAN','FIX',0), +(0,20,20,'name','FIX',0), +(0,21,20,'unit','FIX',0), +(0,24,20,'description','FIX',0); +/*!40000 ALTER TABLE `name_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `name_overrides` +-- + +DROP TABLE IF EXISTS `name_overrides`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `name_overrides` ( + `domain_id` int(10) unsigned DEFAULT NULL, + `entity_id` int(10) unsigned DEFAULT NULL, + `property_id` int(10) unsigned DEFAULT NULL, + `name` varchar(255) DEFAULT NULL, + UNIQUE KEY `name_ov_ukey` (`domain_id`,`entity_id`,`property_id`), + KEY `name_ov_dom_ent_idx` (`domain_id`,`entity_id`), + KEY `name_ov_forkey_ent` (`entity_id`), + KEY `name_ov_forkey_pro` (`property_id`), + CONSTRAINT `name_ov_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`), + CONSTRAINT `name_ov_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`), + CONSTRAINT 
`name_ov_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `name_overrides` +-- + +LOCK TABLES `name_overrides` WRITE; +/*!40000 ALTER TABLE `name_overrides` DISABLE KEYS */; +/*!40000 ALTER TABLE `name_overrides` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `null_data` +-- + +DROP TABLE IF EXISTS `null_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `null_data` ( + `domain_id` int(10) unsigned DEFAULT NULL, + `entity_id` int(10) unsigned DEFAULT NULL, + `property_id` int(10) unsigned DEFAULT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX') DEFAULT NULL, + `pidx` int(10) unsigned NOT NULL DEFAULT 0, + KEY `null_data_dom_ent_idx` (`domain_id`,`entity_id`), + KEY `null_forkey_ent` (`entity_id`), + KEY `null_forkey_pro` (`property_id`), + CONSTRAINT `null_forkey_dom` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`), + CONSTRAINT `null_forkey_ent` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`), + CONSTRAINT `null_forkey_pro` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `null_data` +-- + +LOCK TABLES `null_data` WRITE; +/*!40000 ALTER TABLE `null_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `null_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `passwd` +-- + +DROP TABLE IF EXISTS `passwd`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `passwd` ( + `principal` varbinary(255) NOT NULL, + `hash` varbinary(255) NOT NULL, + `alg` varchar(255) DEFAULT 'SHA-512', + `it` int(10) unsigned DEFAULT 5000, + `salt` 
varbinary(255) NOT NULL, + PRIMARY KEY (`principal`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `passwd` +-- + +LOCK TABLES `passwd` WRITE; +/*!40000 ALTER TABLE `passwd` DISABLE KEYS */; +/*!40000 ALTER TABLE `passwd` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `permissions` +-- + +DROP TABLE IF EXISTS `permissions`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `permissions` ( + `role` varbinary(255) NOT NULL, + `permissions` mediumtext NOT NULL, + PRIMARY KEY (`role`), + CONSTRAINT `perm_name_roles` FOREIGN KEY (`role`) REFERENCES `roles` (`name`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `permissions` +-- + +LOCK TABLES `permissions` WRITE; +/*!40000 ALTER TABLE `permissions` DISABLE KEYS */; +INSERT INTO `permissions` VALUES +('administration','[{\"grant\":\"true\",\"priority\":\"true\",\"permission\":\"*\"}]'); +/*!40000 ALTER TABLE `permissions` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `query_template_def` +-- + +DROP TABLE IF EXISTS `query_template_def`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `query_template_def` ( + `id` int(10) unsigned NOT NULL, + `definition` mediumtext NOT NULL, + PRIMARY KEY (`id`), + CONSTRAINT `query_template_def_ibfk_1` FOREIGN KEY (`id`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `query_template_def` +-- + +LOCK TABLES `query_template_def` WRITE; +/*!40000 ALTER TABLE `query_template_def` DISABLE KEYS */; +/*!40000 ALTER TABLE `query_template_def` ENABLE KEYS */; +UNLOCK 
TABLES; + +-- +-- Table structure for table `reference_data` +-- + +DROP TABLE IF EXISTS `reference_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `reference_data` ( + `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.', + `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.', + `property_id` int(10) unsigned NOT NULL COMMENT 'Property.', + `value` int(10) unsigned NOT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') NOT NULL COMMENT 'Status of this statement.', + `pidx` int(10) unsigned NOT NULL DEFAULT 0, + `value_iversion` int(10) unsigned DEFAULT NULL, + KEY `entity_id` (`entity_id`,`property_id`), + KEY `ref_domain_id_entity` (`domain_id`), + KEY `ref_property_id_entity` (`property_id`), + KEY `ref_value_entity` (`value`), + KEY `value` (`value`,`value_iversion`), + CONSTRAINT `ref_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`), + CONSTRAINT `ref_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`), + CONSTRAINT `ref_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`), + CONSTRAINT `ref_value_entity` FOREIGN KEY (`value`) REFERENCES `entities` (`id`), + CONSTRAINT `reference_data_ibfk_1` FOREIGN KEY (`value`, `value_iversion`) REFERENCES `entity_version` (`entity_id`, `_iversion`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `reference_data` +-- + +LOCK TABLES `reference_data` WRITE; +/*!40000 ALTER TABLE `reference_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `reference_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `roles` +-- + +DROP TABLE IF EXISTS `roles`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `roles` ( + `name` varbinary(255) NOT NULL, + `description` 
mediumtext DEFAULT NULL, + PRIMARY KEY (`name`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `roles` +-- + +LOCK TABLES `roles` WRITE; +/*!40000 ALTER TABLE `roles` DISABLE KEYS */; +INSERT INTO `roles` VALUES +('administration','Users with this role have unrestricted permissions.'), +('anonymous','Users who did not authenticate themselves.'); +/*!40000 ALTER TABLE `roles` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `stats` +-- + +DROP TABLE IF EXISTS `stats`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `stats` ( + `name` varchar(255) NOT NULL, + `value` blob DEFAULT NULL, + PRIMARY KEY (`name`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `stats` +-- + +LOCK TABLES `stats` WRITE; +/*!40000 ALTER TABLE `stats` DISABLE KEYS */; +INSERT INTO `stats` VALUES +('RootBenchmark','��\0sr\0-org.caosdb.server.database.misc.RootBenchmark����Qk]\0\0xr\04org.caosdb.server.database.misc.TransactionBenchmark����Qk]\0J\0sinceL\0measurementst\0Ljava/util/Map;[\0stackTraceElementst\0[Ljava/lang/StackTraceElement;L\0\rsubBenchmarksq\0~\0xp\0\0�L� �sr\0java.util.HashMap���`�\0F\0\nloadFactorI\0 thresholdxp?@\0\0\0\0\0\0w\0\0\0\0\0\0\0xur\0[Ljava.lang.StackTraceElement;F*<<�\"9\0\0xp\0\0\0sr\0java.lang.StackTraceElementa Ś&6݅\0B\0formatI\0\nlineNumberL\0classLoaderNamet\0Ljava/lang/String;L\0declaringClassq\0~\0\nL\0fileNameq\0~\0\nL\0\nmethodNameq\0~\0\nL\0\nmoduleNameq\0~\0\nL\0\rmoduleVersionq\0~\0\nxp\0\0Spt\0java.lang.Threadt\0Thread.javat\0\rgetStackTracet\0 java.baset\017.0.12sq\0~\0 \0\0!t\0appt\04org.caosdb.server.database.misc.TransactionBenchmarkt\0TransactionBenchmark.javat\0<init>ppsq\0~\0 
\0\0\0�q\0~\0t\0-org.caosdb.server.database.misc.RootBenchmarkq\0~\0q\0~\0ppsq\0~\0 \0\0q\0~\0q\0~\0q\0~\0t\0<clinit>ppsq\0~\0 \0\0fq\0~\0t\0org.caosdb.server.CaosDBServert\0CaosDBServer.javat\0initBackendppsq\0~\0 \0\0\0�q\0~\0q\0~\0q\0~\0t\0mainppsq\0~\0?@\0\0\0\0\0w\0\0\0\0\0\0t\0Infosr\0,org.caosdb.server.database.misc.SubBenchmark����Qk]\0L\0nameq\0~\0\nxq\0~\0\0\0�L� �sq\0~\0?@\0\0\0\0\0\0w\0\0\0\0\0\0\0xuq\0~\0\0\0\0\nsq\0~\0 \0\0Spq\0~\0q\0~\0\rq\0~\0q\0~\0q\0~\0sq\0~\0 \0\0!q\0~\0q\0~\0q\0~\0q\0~\0ppsq\0~\0 \0\0\0�q\0~\0t\0,org.caosdb.server.database.misc.SubBenchmarkq\0~\0q\0~\0ppsq\0~\0 \0\0�q\0~\0q\0~\0q\0~\0t\0getBenchmarkppsq\0~\0 \0\0�q\0~\0q\0~\0q\0~\0q\0~\0+ppsq\0~\0 \0\0\0$q\0~\0t\02org.caosdb.server.transaction.TransactionInterfacet\0TransactionInterface.javat\0getTransactionBenchmarkppsq\0~\0 \0\0\00q\0~\0q\0~\0.q\0~\0/t\0executeppsq\0~\0 \0\0\0�q\0~\0t\0org.caosdb.server.utils.Infot\0 Info.javat\0syncDatabaseppsq\0~\0 \0\0\0�q\0~\0t\0/org.caosdb.server.database.misc.RootBenchmark$1q\0~\0t\0runppsq\0~\0 \0\0Hpq\0~\0q\0~\0\rq\0~\09q\0~\0q\0~\0sq\0~\0?@\0\0\0\0\0w\0\0\0\0\0\0t\0 SyncStatssq\0~\0\"\0\0�L� �sq\0~\0?@\0\0\0\0\0\0w\0\0\0\0\0\0\0xuq\0~\0\0\0\0 sq\0~\0 \0\0Spq\0~\0q\0~\0\rq\0~\0q\0~\0q\0~\0sq\0~\0 \0\0!q\0~\0q\0~\0q\0~\0q\0~\0ppsq\0~\0 \0\0\0�q\0~\0q\0~\0)q\0~\0q\0~\0ppsq\0~\0 \0\0�q\0~\0q\0~\0q\0~\0q\0~\0+ppsq\0~\0 \0\0�q\0~\0q\0~\0q\0~\0q\0~\0+ppsq\0~\0 \0\0\00q\0~\0q\0~\0.q\0~\0/q\0~\02ppsq\0~\0 \0\0\0�q\0~\0q\0~\04q\0~\05q\0~\06ppsq\0~\0 \0\0\0�q\0~\0q\0~\08q\0~\0q\0~\09ppsq\0~\0 \0\0Hpq\0~\0q\0~\0\rq\0~\09q\0~\0q\0~\0sq\0~\0?@\0\0\0\0\0w\0\0\0\0\0\0t\0MySQLSyncStatssq\0~\0\"\0\0�L� �sq\0~\0?@\0\0\0\0\0\0w\0\0\0\0\0\0\0xuq\0~\0\0\0\0sq\0~\0 \0\0Spq\0~\0q\0~\0\rq\0~\0q\0~\0q\0~\0sq\0~\0 \0\0!q\0~\0q\0~\0q\0~\0q\0~\0ppsq\0~\0 \0\0\0�q\0~\0q\0~\0)q\0~\0q\0~\0ppsq\0~\0 \0\0�q\0~\0q\0~\0q\0~\0q\0~\0+ppsq\0~\0 \0\0�q\0~\0q\0~\0q\0~\0q\0~\0+ppsq\0~\0 
\0\0q\0~\0t\0-org.caosdb.server.database.BackendTransactiont\0BackendTransaction.javat\0getImplementationppsq\0~\0 \0\0\0+q\0~\0t\08org.caosdb.server.database.backend.transaction.SyncStatst\0SyncStats.javaq\0~\02ppsq\0~\0 \0\0\0�q\0~\0q\0~\0Tq\0~\0Ut\0executeTransactionppsq\0~\0 \0\0\01q\0~\0q\0~\0.q\0~\0/q\0~\02ppsq\0~\0 \0\0\0�q\0~\0q\0~\04q\0~\05q\0~\06ppsq\0~\0 \0\0\0�q\0~\0q\0~\08q\0~\0q\0~\09ppsq\0~\0 \0\0Hpq\0~\0q\0~\0\rq\0~\09q\0~\0q\0~\0sq\0~\0?@\0\0\0\0\0\0w\0\0\0\0\0\0\0xq\0~\0Jxq\0~\0<xq\0~\0!x'), +('TransactionBenchmark','��\0sr\00caosdb.server.database.misc.TransactionBenchmark�Cl=���E\0J\0sinceL\0acct\0Ljava/util/HashMap;L\0countsq\0~\0xp\0\0l���Wsr\0java.util.HashMap���`�\0F\0\nloadFactorI\0 thresholdxp?@\0\0\0\0\0w\0\0\0\0\0\0t\0 SyncStatssr\0java.lang.Long;��̏#�\0J\0valuexr\0java.lang.Number������\0\0xp\0\0\0\0\0\0\0t\0GetInfosq\0~\0\0\0\0\0\0\0 xsq\0~\0?@\0\0\0\0\0w\0\0\0\0\0\0q\0~\0sr\0java.lang.Integer⠤���8\0I\0valuexq\0~\0\0\0\0q\0~\0 sq\0~\0\0\0\0x'); +/*!40000 ALTER TABLE `stats` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `text_data` +-- + +DROP TABLE IF EXISTS `text_data`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `text_data` ( + `domain_id` int(10) unsigned NOT NULL COMMENT 'Domain.', + `entity_id` int(10) unsigned NOT NULL COMMENT 'Entity.', + `property_id` int(10) unsigned NOT NULL COMMENT 'Property.', + `value` text NOT NULL, + `status` enum('OBLIGATORY','RECOMMENDED','SUGGESTED','FIX','REPLACEMENT') NOT NULL COMMENT 'Status of this statement.', + `pidx` int(10) unsigned NOT NULL DEFAULT 0, + KEY `domain_id` (`domain_id`,`entity_id`), + KEY `str_entity_id_entity` (`entity_id`), + KEY `str_property_id_entity` (`property_id`), + CONSTRAINT `str_domain_id_entity` FOREIGN KEY (`domain_id`) REFERENCES `entities` (`id`), + CONSTRAINT `str_entity_id_entity` FOREIGN KEY (`entity_id`) REFERENCES `entities` (`id`), + CONSTRAINT 
`str_property_id_entity` FOREIGN KEY (`property_id`) REFERENCES `entities` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `text_data` +-- + +LOCK TABLES `text_data` WRITE; +/*!40000 ALTER TABLE `text_data` DISABLE KEYS */; +/*!40000 ALTER TABLE `text_data` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `transaction_log` +-- + +DROP TABLE IF EXISTS `transaction_log`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `transaction_log` ( + `transaction` varchar(255) NOT NULL COMMENT 'Transaction.', + `entity_id` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL, + `username` varbinary(255) NOT NULL, + `seconds` bigint(20) unsigned NOT NULL DEFAULT 0, + `nanos` int(10) unsigned NOT NULL DEFAULT 0, + `realm` varbinary(255) NOT NULL, + KEY `entity_id` (`entity_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `transaction_log` +-- + +LOCK TABLES `transaction_log` WRITE; +/*!40000 ALTER TABLE `transaction_log` DISABLE KEYS */; +/*!40000 ALTER TABLE `transaction_log` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `transactions` +-- + +DROP TABLE IF EXISTS `transactions`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `transactions` ( + `srid` varbinary(255) NOT NULL, + `username` varbinary(255) NOT NULL, + `realm` varbinary(255) NOT NULL, + `seconds` bigint(20) unsigned NOT NULL, + `nanos` int(10) unsigned NOT NULL, + PRIMARY KEY (`srid`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `transactions` +-- + +LOCK TABLES `transactions` WRITE; +/*!40000 
ALTER TABLE `transactions` DISABLE KEYS */; +INSERT INTO `transactions` VALUES +('cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e','administration','CaosDB',0,0); +/*!40000 ALTER TABLE `transactions` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `units_lin_con` +-- + +DROP TABLE IF EXISTS `units_lin_con`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `units_lin_con` ( + `signature_from` bigint(20) NOT NULL, + `signature_to` bigint(20) NOT NULL, + `a` decimal(65,30) NOT NULL, + `b_dividend` int(11) NOT NULL, + `b_divisor` int(11) NOT NULL, + `c` decimal(65,30) NOT NULL, + PRIMARY KEY (`signature_from`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `units_lin_con` +-- + +LOCK TABLES `units_lin_con` WRITE; +/*!40000 ALTER TABLE `units_lin_con` DISABLE KEYS */; +/*!40000 ALTER TABLE `units_lin_con` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `user_info` +-- + +DROP TABLE IF EXISTS `user_info`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `user_info` ( + `realm` varbinary(255) NOT NULL, + `name` varbinary(255) NOT NULL, + `email` varbinary(255) DEFAULT NULL, + `status` enum('ACTIVE','INACTIVE') NOT NULL DEFAULT 'INACTIVE', + `entity` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin DEFAULT NULL, + PRIMARY KEY (`realm`,`name`), + KEY `subject_entity` (`entity`), + CONSTRAINT `subjects_ibfk_2` FOREIGN KEY (`entity`) REFERENCES `entity_ids` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `user_info` +-- + +LOCK TABLES `user_info` WRITE; +/*!40000 ALTER TABLE `user_info` DISABLE 
KEYS */; +/*!40000 ALTER TABLE `user_info` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Table structure for table `user_roles` +-- + +DROP TABLE IF EXISTS `user_roles`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `user_roles` ( + `realm` varbinary(255) NOT NULL, + `user` varbinary(255) NOT NULL, + `role` varbinary(255) NOT NULL, + PRIMARY KEY (`realm`,`user`,`role`), + KEY `user_roles_ibfk_1` (`role`), + CONSTRAINT `user_roles_ibfk_1` FOREIGN KEY (`role`) REFERENCES `roles` (`name`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `user_roles` +-- + +LOCK TABLES `user_roles` WRITE; +/*!40000 ALTER TABLE `user_roles` DISABLE KEYS */; +/*!40000 ALTER TABLE `user_roles` ENABLE KEYS */; +UNLOCK TABLES; + +-- +-- Dumping routines for database 'caosdb' +-- +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `CaosDBVersion` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `CaosDBVersion`() RETURNS varchar(255) CHARSET utf8 COLLATE utf8_unicode_ci + DETERMINISTIC +RETURN 'v7.0.2' ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 
'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `constructDateTimeWhereClauseForColumn` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `constructDateTimeWhereClauseForColumn`(seconds_col VARCHAR(255), nanos_col VARCHAR(255), vDateTimeSecLow VARCHAR(255), vDateTimeNSLow VARCHAR(255), vDateTimeSecUpp VARCHAR(255), vDateTimeNSUpp VARCHAR(255), operator CHAR(4)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci + DETERMINISTIC +BEGIN + + DECLARE isInterval BOOLEAN DEFAULT vDateTimeSecUpp IS NOT NULL or vDateTimeNSUpp IS NOT NULL; + DECLARE operator_prefix CHAR(1) DEFAULT LEFT(operator,1); + + IF isInterval THEN + IF operator = '=' THEN + RETURN " 0=1"; + ELSEIF operator = '!=' THEN + RETURN " 0=1"; + ELSEIF operator = '>' or operator = '<=' THEN + RETURN CONCAT(" ", seconds_col, operator_prefix, vDateTimeSecUpp); + ELSEIF operator = '<' or operator = '>=' THEN + RETURN CONCAT(" ", seconds_col, operator_prefix, vDateTimeSecLow); + ELSEIF operator = "(" THEN + RETURN CONCAT(" ", seconds_col, ">=", vDateTimeSecLow, " AND ",seconds_col, "<", vDateTimeSecUpp); + ELSEIF operator = "!(" THEN + RETURN CONCAT(" ", seconds_col, "<", vDateTimeSecLow, " OR ", seconds_col, ">=", vDateTimeSecUpp); + END IF; + ELSE + IF operator = '=' THEN + RETURN CONCAT(" ", + seconds_col, + "=", vDateTimeSecLow, IF(vDateTimeNSLow IS NULL, CONCAT(' AND ', nanos_col, ' IS NULL'), CONCAT(' AND ', + nanos_col, + '=', vDateTimeNSLow))); + ELSEIF operator = '!=' THEN + RETURN CONCAT(" ", + seconds_col, + "!=", vDateTimeSecLow, IF(vDateTimeNSLow IS NULL, '', CONCAT(' OR 
', + nanos_col, + '!=', vDateTimeNSLow))); + ELSEIF operator = '>' or operator = '<' THEN + RETURN CONCAT(" ", + seconds_col, operator, vDateTimeSecLow, IF(vDateTimeNSLow IS NULL, '', CONCAT(' OR (',seconds_col,'=', vDateTimeSecLow, ' AND ',nanos_col, operator, vDateTimeNSLow, ')'))); + ELSEIF operator = '>=' or operator = '<=' THEN + RETURN CONCAT( + " ", seconds_col, operator, vDateTimeSecLow, + IF(vDateTimeNSLow IS NULL, + '', + CONCAT( + ' AND (', seconds_col, operator_prefix, vDateTimeSecLow, + ' OR ', nanos_col, operator, vDateTimeNSLow, + ' OR ', nanos_col, ' IS NULL)'))); + ELSEIF operator = "(" THEN + RETURN IF(vDateTimeNSLow IS NULL,CONCAT(" ",seconds_col,"=", vDateTimeSecLow),CONCAT(" ",seconds_col,"=",vDateTimeSecLow," AND ",nanos_col,"=",vDateTimeNSLow)); + ELSEIF operator = "!(" THEN + RETURN IF(vDateTimeNSLow IS NULL,CONCAT(" ",seconds_col,"!=",vDateTimeSecLow, ""),CONCAT(" ",seconds_col,"!=",vDateTimeSecLow," OR ",nanos_col, " IS NULL OR ", nanos_col, "!=",vDateTimeNSLow)); + END IF; + END IF; + return ' 0=1'; +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `convert_unit` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `convert_unit`(unit_sig BIGINT, value DECIMAL(65,30)) RETURNS decimal(65,30) + 
DETERMINISTIC +BEGIN + DECLARE ret DECIMAL(65,30) DEFAULT value; + + SELECT (((value+a)*b_dividend)/b_divisor+c) INTO ret FROM units_lin_con WHERE signature_from=unit_sig; + RETURN ret; +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `getAggValueWhereClause` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `getAggValueWhereClause`(entities VARCHAR(255), properties VARCHAR(255)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci + DETERMINISTIC +BEGIN + RETURN CONCAT(" EXISTS (SELECT 1 FROM `", entities, "` AS ent WHERE ent.id = subdata.entity_id LIMIT 1)", IF(properties IS NOT NULL AND properties != '', CONCAT(" AND EXISTS (SELECT 1 FROM `", properties, "` as props WHERE props.id = subdata.property_id LIMIT 1)"),'')); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `getDateTimeWhereClause` */; +/*!50003 SET 
@saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `getDateTimeWhereClause`(vDateTime VARCHAR(255), operator CHAR(4)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci + DETERMINISTIC +BEGIN + DECLARE sep_loc INTEGER DEFAULT LOCATE('--',vDateTime); + DECLARE vDateTimeLow VARCHAR(255) DEFAULT IF(sep_loc != 0, SUBSTRING_INDEX(vDateTime, '--',1), vDateTime); + DECLARE vDateTimeUpp VARCHAR(255) DEFAULT IF(sep_loc != 0, SUBSTRING_INDEX(vDateTime, '--',-1), NULL); + + DECLARE vDateTimeSecLow VARCHAR(255) DEFAULT SUBSTRING_INDEX(vDateTimeLow, 'UTC', 1); + DECLARE vDateTimeNSLow VARCHAR(255) DEFAULT IF(SUBSTRING_INDEX(vDateTimeLow, 'UTC', -1)='',NULL,SUBSTRING_INDEX(vDateTimeLow, 'UTC', -1)); + + DECLARE vDateTimeSecUpp VARCHAR(255) DEFAULT IF(sep_loc != 0, SUBSTRING_INDEX(vDateTimeUpp, 'UTC', 1), NULL); + DECLARE vDateTimeNSUpp VARCHAR(255) DEFAULT IF(sep_loc != 0 AND SUBSTRING_INDEX(vDateTimeUpp, 'UTC', -1)!='',SUBSTRING_INDEX(vDateTimeUpp, 'UTC', -1),NULL); + + + RETURN constructDateTimeWhereClauseForColumn("subdata.value", "subdata.value_ns", vDateTimeSecLow, vDateTimeNSLow, vDateTimeSecUpp, vDateTimeNSUpp, operator); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `getDateWhereClause` */; +/*!50003 SET @saved_cs_client = 
@@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `getDateWhereClause`(vDateTimeDotNotation VARCHAR(255), operator CHAR(4)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci + DETERMINISTIC +BEGIN + DECLARE isInterval INTEGER DEFAULT LOCATE('--',vDateTimeDotNotation); + + DECLARE vILB VARCHAR(255) DEFAULT IF(isInterval != 0, SUBSTRING_INDEX(vDateTimeDotNotation, '--', 1), vDateTimeDotNotation); + + DECLARE vEUB VARCHAR(255) DEFAULT IF(isInterval != 0, SUBSTRING_INDEX(vDateTimeDotNotation, '--', -1), NULL); + DECLARE vILB_Date INTEGER DEFAULT SUBSTRING_INDEX(vILB, '.', 1); + DECLARE vEUB_Date INTEGER DEFAULT SUBSTRING_INDEX(vEUB, '.', 1); + + DECLARE hasTime INTEGER DEFAULT LOCATE('.NULL.NULL',vILB); + + DECLARE dom INTEGER DEFAULT vILB_Date % 100; + + DECLARE mon INTEGER DEFAULT ((vILB_Date % 10000) - dom) / 100; + + DECLARE yea INTEGER DEFAULT (vILB_Date - (vILB_Date % 10000)) / 10000; + + IF operator = '=' and hasTime != 0 THEN + RETURN CONCAT(" subdata.value=", vILB_Date); + ELSEIF operator = "!=" and hasTime != 0 THEN + IF mon != 0 and dom != 0 THEN + RETURN CONCAT(" subdata.value!=", vILB_Date, " and subdata.value%100!=0"); + ELSEIF mon != 0 THEN + RETURN CONCAT(" subdata.value!=", vILB_Date, " and subdata.value%100=0 and subdata.value%10000!=0"); + ELSE + RETURN CONCAT(" subdata.value!=", vILB_Date, " and subdata.value%10000=0"); + END IF; + ELSEIF operator = "(" and hasTime != 0 THEN + IF mon != 0 and dom != 0 THEN + RETURN CONCAT(" subdata.value=", vILB_Date); + ELSEIF mon != 0 THEN + RETURN CONCAT(" subdata.value=",vILB_Date," OR (subdata.value>", vILB_Date, " and subdata.value<", vEUB_Date, " and subdata.value%10000!=0)"); + ELSE + RETURN 
CONCAT(" subdata.value=",vILB_Date," OR (subdata.value>", vILB_Date, " and subdata.value<", vEUB_Date,")"); + END IF; + ELSEIF operator = "!(" THEN + IF hasTime = 0 THEN + RETURN " 0=0"; + END IF; + IF mon != 0 and dom != 0 THEN + RETURN CONCAT(" subdata.value!=",vILB_Date); + ELSEIF mon != 0 THEN + RETURN CONCAT(" (subdata.value!=",vILB_Date, " AND subdata.value%100=0) OR ((subdata.value<", vILB_Date, " or subdata.value>", vEUB_Date, ") and subdata.value%100!=0)"); + ELSE + RETURN CONCAT(" (subdata.value!=",vILB_Date, " AND subdata.value%10000=0) OR ((subdata.value<", vILB_Date, " or subdata.value>=", vEUB_Date, ") and subdata.value%10000!=0)"); + END IF; + ELSEIF operator = "<" THEN + IF mon != 0 and dom != 0 THEN + RETURN CONCAT(" subdata.value<", vILB_Date, " and (subdata.value%100!=0 or (subdata.value<", yea*10000+mon*100, " and subdata.value%10000!=0) or (subdata.value<", yea*10000, " and subdata.value%10000=0))"); + ELSEIF mon != 0 THEN + RETURN CONCAT(" subdata.value<", vILB_Date, " and (subdata.value%10000!=0 or (subdata.value<", yea*10000, "))"); + ELSE + RETURN CONCAT(" subdata.value<", vILB_Date); + END IF; + ELSEIF operator = ">" THEN + IF mon != 0 and dom != 0 THEN + RETURN CONCAT(" subdata.value>", vILB_Date); + ELSEIF mon != 0 THEN + RETURN CONCAT(" subdata.value>=",vEUB_Date); + ELSE + RETURN CONCAT(" subdata.value>=",vEUB_Date); + END IF; + ELSEIF operator = "<=" THEN + IF mon != 0 and dom != 0 THEN + + RETURN CONCAT(" subdata.value<=", vILB_Date, + " or (subdata.value<=", yea*10000 + mon*100, " and subdata.value%100=0)"); + ELSEIF mon != 0 THEN + + RETURN CONCAT(" subdata.value<", vEUB_Date); + ELSE + + RETURN CONCAT(" subdata.value<", vEUB_Date); + END IF; + ELSEIF operator = ">=" THEN + IF mon != 0 and dom != 0 THEN + + RETURN CONCAT(" subdata.value>=", vILB_Date, + " or (subdata.value>=", yea*10000 + mon*100, " and subdata.value%100=0)", + " or (subdata.value>=", yea*10000, " and subdata.value%10000=0)"); + ELSEIF mon != 0 THEN + + RETURN 
CONCAT(" subdata.value>=", yea*10000 + mon*100, + " or (subdata.value>=", yea*10000, " and subdata.value%10000=0)"); + ELSE + + RETURN CONCAT(" subdata.value>=", yea*10000); + END IF; + END IF; + + return ' 0=1'; +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `getDoubleWhereClause` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `getDoubleWhereClause`(value DOUBLE, unit_sig BIGINT, valueStdUnit DECIMAL(65,30), stdUnit_sig BIGINT, o CHAR(4)) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci + DETERMINISTIC +BEGIN + RETURN IF(unit_sig IS NULL AND value IS NOT NULL, + CONCAT('subdata.value ', o, ' \'', value, '\''), + CONCAT( + IF(value IS NULL, '', + CONCAT('(subdata.unit_sig=', unit_sig, ' AND subdata.value ', o, ' \'', value, '\') OR ')), + IF(unit_sig = stdUnit_sig,'',CONCAT('(subdata.unit_sig=', stdUnit_sig,' AND subdata.value ', o, ' \'', valueStdUnit, '\') OR ')),'(standard_unit(subdata.unit_sig)=', stdUnit_sig,' AND convert_unit(subdata.unit_sig,subdata.value) ', o, ' ', valueStdUnit, ')')); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET 
collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `get_head_relative` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `get_head_relative`(EntityID VARCHAR(255), + HeadOffset INT UNSIGNED) RETURNS varbinary(255) + READS SQL DATA +BEGIN + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + + SELECT internal_id INTO InternalEntityID FROM entity_ids WHERE id = EntityID; + + + + + + RETURN ( + SELECT e.version + FROM entity_version AS e + WHERE e.entity_id = InternalEntityID + ORDER BY e._iversion DESC + LIMIT 1 OFFSET HeadOffset + ); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `get_head_version` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `get_head_version`(EntityID 
VARCHAR(255)) RETURNS varbinary(255) + READS SQL DATA +BEGIN + RETURN get_head_relative(EntityID, 0); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `get_iversion` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `get_iversion`(InternalEntityID INT UNSIGNED, + Version VARBINARY(255)) RETURNS int(10) unsigned + READS SQL DATA +BEGIN + RETURN ( + SELECT e._iversion + FROM entity_version AS e + WHERE e.entity_id = InternalEntityID + AND e.version = Version + ); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `get_primary_parent_version` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; 
+/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `get_primary_parent_version`(EntityID VARCHAR(255), + Version VARBINARY(255)) RETURNS varbinary(255) + READS SQL DATA +BEGIN + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + + SELECT internal_id INTO InternalEntityID FROM entity_ids WHERE id = EntityID; + + RETURN ( + SELECT p.version + FROM entity_version AS e INNER JOIN entity_version AS p + ON (e._ipparent = p._iversion + AND e.entity_id = p.entity_id) + WHERE e.entity_id = InternalEntityID + AND e.version = Version + ); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `get_version_timestamp` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `get_version_timestamp`(EntityID VARCHAR(255), + Version VARBINARY(255)) RETURNS varchar(255) CHARSET utf8 COLLATE utf8_unicode_ci + READS SQL DATA +BEGIN + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + + SELECT internal_id INTO InternalEntityID FROM entity_ids WHERE id = EntityID; + + RETURN ( + SELECT concat(t.seconds, '.', t.nanos) + FROM entity_version AS e INNER JOIN transactions AS t + ON ( e.srid = t.srid ) + WHERE e.entity_id = InternalEntityID + AND e.version = Version + ); +END ;; +DELIMITER ; +/*!50003 
SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `is_feature_config` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `is_feature_config`(_Key VARCHAR(255), + Expected VARCHAR(255)) RETURNS tinyint(1) + READS SQL DATA +BEGIN + RETURN ( + SELECT f._value = Expected FROM feature_config as f WHERE f._key = _Key + ); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `makeStmt` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `makeStmt`(sourceSet VARCHAR(255), targetSet VARCHAR(255), data VARCHAR(20000), + properties 
VARCHAR(20000), versioned BOOLEAN) RETURNS varchar(20000) CHARSET utf8 COLLATE utf8_unicode_ci + NO SQL +BEGIN + IF sourceSet = "entities" AND versioned THEN + RETURN CONCAT('INSERT IGNORE INTO `', + targetSet, + '` (id, _iversion) SELECT entity_id, _iversion FROM ', + data, + IF(properties IS NULL, '', + CONCAT(' AS data JOIN `', properties, '` AS prop ON (data.property_id = prop.id) WHERE ', + 'data.entity_id = prop.id2 OR prop.id2 = 0'))); + END IF; + RETURN CONCAT( + IF(targetSet IS NULL, + CONCAT('DELETE FROM `',sourceSet,'` WHERE NOT EXISTS (SELECT 1 FROM '), + CONCAT('INSERT IGNORE INTO `',targetSet,'` (id) SELECT id FROM `',sourceSet,'` ', + 'WHERE EXISTS (SELECT 1 FROM ')), + IF(properties IS NULL, + CONCAT(data,' as data WHERE '), + CONCAT('`',properties,'` as prop JOIN ',data,' as data ON (data.property_id=prop.id) WHERE ', + '(data.entity_id=prop.id2 OR prop.id2=0) AND ')), + 'data.entity_id=`', sourceSet, '`.`id` LIMIT 1)' + ); + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `standard_unit` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `standard_unit`(unit_sig BIGINT) RETURNS bigint(20) + DETERMINISTIC +BEGIN + DECLARE ret BIGINT DEFAULT unit_sig; + + SELECT signature_to INTO ret FROM units_lin_con WHERE 
signature_from=unit_sig; + RETURN ret; +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `_get_head_iversion` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` FUNCTION `_get_head_iversion`(InternalEntityID INT UNSIGNED) RETURNS int(10) unsigned + READS SQL DATA +BEGIN + + + + + RETURN ( + SELECT e._iversion + FROM entity_version AS e + WHERE e.entity_id = InternalEntityID + ORDER BY e._iversion DESC + LIMIT 1 + ); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP FUNCTION IF EXISTS `_get_version` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; 
+CREATE DEFINER=`root`@`%` FUNCTION `_get_version`(InternalEntityID INT UNSIGNED, + IVersion INT UNSIGNED) RETURNS varbinary(255) + READS SQL DATA +BEGIN + RETURN ( + SELECT version FROM entity_version + WHERE entity_id = InternalEntityID + AND _iversion = IVersion + ); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `applyBackReference` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `applyBackReference`(in sourceSet VARCHAR(255), targetSet VARCHAR(255), + in propertiesTable VARCHAR(255), in entitiesTable VARCHAR(255), in subQuery BOOLEAN, + in versioned BOOLEAN) +BEGIN + DECLARE newTableName VARCHAR(255) DEFAULT NULL; + + + IF subQuery IS TRUE THEN + call registerTempTableName(newTableName); + + SET @createBackRefSubQueryTableStr = CONCAT('CREATE TEMPORARY TABLE `',newTableName,'` ( entity_id INT UNSIGNED NOT NULL, id INT UNSIGNED NOT NULL, CONSTRAINT `',newTableName,'PK` PRIMARY KEY (id, entity_id))'); + + PREPARE createBackRefSubQueryTable FROM @createBackRefSubQueryTableStr; + EXECUTE createBackRefSubQueryTable; + DEALLOCATE PREPARE createBackRefSubQueryTable; + + SET @backRefSubResultSetStmtStr = CONCAT('INSERT IGNORE INTO `', + newTableName, + '` (id,entity_id) SELECT entity_id AS id, value AS entity_id FROM 
`reference_data` AS data ', + 'WHERE EXISTS (SELECT 1 FROM `', + sourceSet, + '` AS source WHERE source.id=data.value LIMIT 1)', + IF(propertiesTable IS NULL, + '', + CONCAT(' AND EXISTS (SELECT 1 FROM `', + propertiesTable, + '` AS p WHERE p.id=data.property_id LIMIT 1)')), + IF(entitiesTable IS NULL, + '', + CONCAT(' AND EXISTS (SELECT 1 FROM `', + entitiesTable, + '` AS e WHERE e.id=data.entity_id LIMIT 1)')) + ); + + PREPARE backRefSubResultSetStmt FROM @backRefSubResultSetStmtStr; + EXECUTE backRefSubResultSetStmt; + DEALLOCATE PREPARE backRefSubResultSetStmt; + + SELECT newTableName as list; + ELSE + IF versioned THEN + IF sourceSet = "entities" THEN + + SET @stmtBackRefStr = CONCAT('INSERT IGNORE INTO `', + targetSet, + '` (id, _iversion) SELECT source.id, _get_head_iversion(source.id)', + + ' FROM entities AS source WHERE EXISTS (', + 'SELECT 1 FROM `reference_data` AS data WHERE data.value=source.id AND (', + 'data.value_iversion IS NULL OR data.value_iversion=_get_head_iversion(source.id))', + IF(entitiesTable IS NULL, + '', + CONCAT(' AND EXISTS (SELECT 1 FROM `', + entitiesTable, + '` AS e WHERE e.id=data.entity_id LIMIT 1)')), + IF(propertiesTable IS NULL, + '', + CONCAT(' AND EXISTS (SELECT 1 FROM `', + propertiesTable, + '` AS p WHERE p.id=data.property_id LIMIT 1)')), + ') UNION ALL ', + + 'SELECT source.id, source._iversion FROM archive_entities AS source WHERE EXISTS (', + 'SELECT 1 FROM `reference_data` AS data WHERE data.value=source.id AND ', + '(data.value_iversion IS NULL OR data.value_iversion=source._iversion)', + IF(entitiesTable IS NULL, + '', + CONCAT(' AND EXISTS (SELECT 1 FROM `', + entitiesTable, + '` AS e WHERE e.id=data.entity_id LIMIT 1)')), + IF(propertiesTable IS NULL, + '', + CONCAT(' AND EXISTS (SELECT 1 FROM `', + propertiesTable, + '` AS p WHERE p.id=data.property_id LIMIT 1)')), + + ')'); + ELSEIF targetSet IS NULL OR sourceSet = targetSet THEN + SET @stmtBackRefStr = CONCAT('DELETE FROM `', + sourceSet, + '` WHERE NOT 
EXISTS (SELECT 1 FROM `reference_data` AS data WHERE data.value=`', + sourceSet, + '`.`id` AND ( data.value_iversion IS NULL OR data.value_iversion=`', + sourceSet, + '`._iversion)', + IF(entitiesTable IS NULL, + '', + CONCAT(' AND EXISTS (SELECT 1 FROM `', + entitiesTable, + '` AS e WHERE e.id=data.entity_id LIMIT 1)')), + IF(propertiesTable IS NULL, + '', + CONCAT(' AND EXISTS (SELECT 1 FROM `', + propertiesTable, + '` AS p WHERE p.id=data.property_id LIMIT 1)')), + ')'); + ELSE + SET @stmtBackRefStr = CONCAT('INSERT IGNORE INTO `', + targetSet, + '` (id, _iversion) SELECT source.id, source._iversion FROM `', + sourceSet, + '` AS source WHERE EXISTS (', + 'SELECT 1 FROM `reference_data` AS data WHERE data.value=source.id AND', + ' (data.value_iversion IS NULL OR data.value_iversion=source._iversion)', + IF(entitiesTable IS NULL, + '', + CONCAT(' AND EXISTS (SELECT 1 FROM `', + entitiesTable, + '` AS e WHERE e.id=data.entity_id LIMIT 1)')), + IF(propertiesTable IS NULL, + '', + CONCAT(' AND EXISTS (SELECT 1 FROM `', + propertiesTable, + '` AS p WHERE p.id=data.property_id LIMIT 1)')), + + ')'); + END IF; + ELSE + + IF targetSet IS NULL OR sourceSet = targetSet THEN + + SET @stmtBackRefStr = CONCAT('DELETE FROM `', + sourceSet, + '` WHERE NOT EXISTS (SELECT 1 FROM `reference_data` AS data WHERE data.value=`', + sourceSet, + '`.`id`', + IF(entitiesTable IS NULL, + '', + CONCAT(' + AND EXISTS (SELECT 1 FROM `', + entitiesTable, + '` AS e WHERE e.id=data.entity_id LIMIT 1)')), + IF(propertiesTable IS NULL, + '', + CONCAT(' + AND EXISTS (SELECT 1 FROM `', + propertiesTable, + '` AS p WHERE p.id=data.property_id LIMIT 1)')), + ')'); + ELSE + + SET @stmtBackRefStr = CONCAT('INSERT IGNORE INTO `', + targetSet, + '` (id) SELECT id FROM `', + sourceSet, + '` AS source WHERE EXISTS (SELECT 1 FROM `reference_data` AS data WHERE data.value=source.id', + IF(entitiesTable IS NULL, + '', + CONCAT(' AND EXISTS (SELECT 1 FROM `', + entitiesTable, + '` AS e WHERE e.id=data.entity_id 
LIMIT 1)')), + IF(propertiesTable IS NULL, + '', + CONCAT(' AND EXISTS (SELECT 1 FROM `', + propertiesTable, + '` AS p WHERE p.id=data.property_id LIMIT 1)')), + ')'); + END IF; + END IF; + + PREPARE stmtBackRef FROM @stmtBackRefStr; + EXECUTE stmtBackRef; + DEALLOCATE PREPARE stmtBackRef; + END IF; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `applyIDFilter` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `applyIDFilter`(in sourceSet VARCHAR(255), in targetSet VARCHAR(255), + in o CHAR(2), in EntityID VARCHAR(255), in agg CHAR(3), in versioned BOOLEAN) +IDFILTER_LABEL: BEGIN +DECLARE data VARCHAR(20000) DEFAULT NULL; +DECLARE aggVal VARCHAR(255) DEFAULT NULL; +DECLARE direction CHAR(4) DEFAULT NULL; +DECLARE entity_id_type VARCHAR(255) DEFAULT "eids.id "; + + +IF agg IS NOT NULL THEN + IF versioned THEN + + SELECT 1 FROM id_agg_with_versioning_not_implemented; + ELSEIF agg = "max" THEN + SET direction = "DESC"; + ELSEIF agg = "min" THEN + SET direction = "ASC "; + ELSE + SELECT 1 FROM unknown_agg_parameter; + END IF; + + SET @stmtIDAggValStr = CONCAT( + "SELECT e.internal_id INTO @sAggVal FROM `", + sourceSet, + "` AS s LEFT JOIN entity_ids AS e ON (s.id=e.internal_id) WHERE s.id>99 ORDER BY CAST(e.id AS UNSIGNED 
INT) ", + direction, + " LIMIT 1"); + + PREPARE stmtIDAggVal FROM @stmtIDAggValStr; + EXECUTE stmtIDAggVal; + DEALLOCATE PREPARE stmtIDAggVal; + SET aggVal = @sAggVal; +END IF; + +IF o = ">" OR o = ">=" OR o = "<" or o = "<=" THEN + SET entity_id_type = "CAST(eids.id AS UNSIGNED INT) "; +END IF; + + +IF targetSet IS NULL OR targetSet = sourceSet THEN + SET data = CONCAT( + "DELETE FROM `", + sourceSet, + "` WHERE ", + IF(o IS NULL OR EntityID IS NULL, + "1=1", + CONCAT("NOT EXISTS (SELECT 1 FROM entity_ids AS eids WHERE ", + entity_id_type, + o, + ' "', + EntityID, + '" ', + " AND eids.internal_id = `", + sourceSet, + "`.id)" + )), + IF(aggVal IS NULL, + "", + CONCAT(" AND `", sourceSet, "`.id!=", + aggVal))); +ELSEIF versioned AND sourceSet = "entities" THEN + + SET data = CONCAT( + "INSERT IGNORE INTO `", + targetSet, + '` (id, _iversion) SELECT e.id, _get_head_iversion(e.id) FROM `entities` AS e JOIN entity_ids AS eids ON (e.id = eids.internal_id) WHERE ', + IF(o IS NULL OR EntityID IS NULL, + "1=1", + CONCAT(entity_id_type, + o, + ' "', + EntityID, + '"' + )), + IF(aggVal IS NULL, + "", + CONCAT(" AND e.id=", + aggVal)), + ' UNION SELECT e.id, _iversion FROM `archive_entities` AS e JOIN entity_ids AS eids ON (e.id = eids.internal_id) WHERE ', + IF(o IS NULL OR EntityID IS NULL, + "1=1", + CONCAT(entity_id_type, + o, + ' "', + EntityID, + '"' + )), + IF(aggVal IS NULL, + "", + CONCAT(" AND e.id=", + aggVal))); + + +ELSE + SET data = CONCAT( + "INSERT IGNORE INTO `", + targetSet, + IF(versioned, + '` (id, _iversion) SELECT data.id, data._iversion FROM `', + '` (id) SELECT data.id FROM `'), + sourceSet, + "` AS data JOIN entity_ids AS eids ON (eids.internal_id = data.id) WHERE ", + IF(o IS NULL OR EntityID IS NULL, + "1=1", + CONCAT(entity_id_type, + o, + ' "', + EntityID, + '"' + )), + IF(aggVal IS NULL, + "", + CONCAT(" AND data.id=", + aggVal))); +END IF; + +Set @stmtIDFilterStr = data; +PREPARE stmtIDFilter FROM @stmtIDFilterStr; +EXECUTE stmtIDFilter; 
+DEALLOCATE PREPARE stmtIDFilter; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `applyPOV` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `applyPOV`(in sourceSet VARCHAR(255), + in targetSet VARCHAR(255), + in propertiesTable VARCHAR(255), + in refIdsTable VARCHAR(255), + in o CHAR(4), + in vText VARCHAR(255), + in vInt INT, + in vDouble DOUBLE, + in unit_sig BIGINT, + in vDoubleStdUnit DOUBLE, + in stdUnit_sig BIGINT, + in vDateTime VARCHAR(255), + in vDateTimeDotNotation VARCHAR(255), + in agg CHAR(3), + in pname VARCHAR(255), + in versioned BOOLEAN) +POV_LABEL: BEGIN + DECLARE data TEXT DEFAULT NULL; + DECLARE sTextData VARCHAR(20000) DEFAULT NULL; + DECLARE sNameData VARCHAR(20000) DEFAULT NULL; + DECLARE sEnumData VARCHAR(20000) DEFAULT NULL; + DECLARE sIntData VARCHAR(20000) DEFAULT NULL; + DECLARE sDoubleData VARCHAR(20000) DEFAULT NULL; + DECLARE sDatetimeData VARCHAR(20000) DEFAULT NULL; + DECLARE sNullData VARCHAR(20000) DEFAULT NULL; + DECLARE sDateData VARCHAR(20000) DEFAULT NULL; + DECLARE sRefData VARCHAR(20000) DEFAULT NULL; + DECLARE aggValue VARCHAR(255) DEFAULT NULL; + DECLARE aggValueWhereClause VARCHAR(20000) DEFAULT NULL; + DECLARE distinctUnits INT DEFAULT 0; + DECLARE usedStdUnit BIGINT DEFAULT 
NULL; + DECLARE keepTabl VARCHAR(255) DEFAULT NULL; + DECLARE existence_op VARCHAR(255) DEFAULT "EXISTS"; + + + + + + IF o = '->' THEN + + call applyRefPOV(sourceSet,targetSet, propertiesTable, refIdsTable, versioned); + LEAVE POV_LABEL; + ELSEIF o = '0' THEN + + + SET vText = NULL; + SET sTextData = 'SELECT domain_id, entity_id, property_id FROM `null_data` AS subdata'; + + + + ELSEIF o = '!0' THEN + + + SET vText = NULL; + + SET sTextData = CONCAT( + 'SELECT DISTINCT domain_id, entity_id, property_id FROM `text_data` AS subdata ', + 'WHERE subdata.value IS NOT NULL UNION ALL ', + 'SELECT DISTINCT domain_id, entity_id, property_id FROM `name_data` AS subdata ', + 'WHERE subdata.value IS NOT NULL UNION ALL ', + 'SELECT DISTINCT domain_id, entity_id, property_id FROM `enum_data` AS subdata ', + 'WHERE subdata.value IS NOT NULL UNION ALL ', + 'SELECT DISTINCT domain_id, entity_id, property_id FROM `integer_data` AS subdata ', + 'WHERE subdata.value IS NOT NULL UNION ALL ', + 'SELECT DISTINCT domain_id, entity_id, property_id FROM `double_data` AS subdata ', + 'WHERE subdata.value IS NOT NULL UNION ALL ', + 'SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data` AS subdata ', + 'WHERE subdata.value IS NOT NULL UNION ALL ', + 'SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data` AS subdata ', + 'WHERE subdata.value IS NOT NULL UNION ALL ', + 'SELECT DISTINCT domain_id, entity_id, property_id FROM `reference_data` AS subdata ', + 'WHERE subdata.value IS NOT NULL'); + + ELSEIF o = "(" or o = "!(" THEN + IF versioned THEN + SET sTextData = IF(vText IS NULL, + CONCAT( + ' SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) ', + 'AS _iversion, property_id FROM `date_data` UNION ALL ', + 'SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_date_data`'), + IF(vDateTimeDotNotation IS NULL, NULL, + CONCAT(' SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) ', + 'AS _iversion, property_id FROM 
`date_data` AS subdata WHERE ', + getDateWhereClause(vDateTimeDotNotation, o), ' UNION ALL ', + 'SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_date_data` ', + 'AS subdata WHERE ', getDateWhereClause(vDateTimeDotNotation, o)))); + SET sDatetimeData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `datetime_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_datetime_data`', + IF(vDateTime IS NULL, NULL, + CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `datetime_data` AS subdata WHERE ', getDateTimeWhereClause(vDateTime, o), ' UNION ALL SELECT DISTINCT domain_id, entity_id,_iversion, property_id FROM `archive_datetime_data` AS subdata WHERE ', getDateTimeWhereClause(vDateTime, o)))); + ELSE + SET sTextData = IF(vText IS NULL, + ' SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data`', + IF(vDateTimeDotNotation IS NULL, NULL, + CONCAT(' SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data` AS subdata WHERE ', + getDateWhereClause(vDateTimeDotNotation, o)))); + SET sDatetimeData = IF(vText IS NULL, + ' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data`', + IF(vDateTime IS NULL, NULL, + CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data` ', + 'AS subdata WHERE ', getDateTimeWhereClause(vDateTime, o)))); + END IF; + SET vText = NULL; + ELSEIF agg IS NOT NULL THEN + IF versioned THEN + SELECT 1 FROM versioned_agg_pov_filter_not_implemented; + END IF; + + + + SET aggValueWhereClause = CONCAT(getDoubleWhereClause(vDouble, unit_sig, vDoubleStdUnit, stdUnit_sig, o), ' AND '); + SET aggValueWhereClause = CONCAT(IF(aggValueWhereClause IS NULL, '', aggValueWhereClause), getAggValueWhereClause(sourceSet, propertiesTable)); + + + SET @aggValueStmtStr = CONCAT('SELECT 
',agg,'(subdata.value), ', agg, '(convert_unit(subdata.unit_sig,subdata.value)), COUNT(DISTINCT standard_unit(subdata.unit_sig)), max(standard_unit(subdata.unit_sig)) INTO @sAggValue, @sAggValueConvert, @distinctUnits, @StdUnitSig FROM (SELECT entity_id, property_id, value, unit_sig FROM `integer_data` UNION ALL SELECT entity_id, property_id, value, unit_sig FROM `double_data`) AS subdata WHERE ', aggValueWhereClause); + + + PREPARE stmtAggValueStmt FROM @aggValueStmtStr; + EXECUTE stmtAggValueStmt; + DEALLOCATE PREPARE stmtAggValueStmt; + + SET distinctUnits = @distinctUnits; + SET aggValue = @sAggValue; + + + IF distinctUnits = 1 THEN + SET aggValue = @sAggValueConvert; + SET usedStdUnit = @StdUnitSig; + ELSE + call raiseWarning(CONCAT("The filter POV(",IF(pname IS NULL, 'NULL', pname),",",IF(o IS NULL, 'NULL', o),",",IF(vText IS NULL, 'NULL', vText),") with the aggregate function '", agg, "' could not match the values against each other with their units. The values had different base units. Only their numric value had been taken into account." 
)); + END IF; + + IF aggValue IS NULL THEN + SET sTextData = 'SELECT NULL as domain_id, NULL as entity_id, NULL as property_id'; + ELSE + SET sTextData = ''; + SET sIntData = CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `integer_data` as subdata WHERE ', getDoubleWhereClause(aggValue, usedStdUnit, aggValue, usedStdUnit, '=')); + SET sDoubleData = CONCAT(' SELECT DISTINCT domain_id, entity_id, property_id FROM `double_data` as subdata WHERE ', getDoubleWhereClause(aggValue, usedStdUnit, aggValue, usedStdUnit, '=')); + END IF; + + SET vText = NULL; + ELSE + + IF versioned THEN + SET sTextData = IF(vText IS NULL, + 'SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `text_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_text_data` ', + CONCAT( + 'SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id ', + 'FROM `text_data` AS subdata WHERE subdata.value ', o,' ? ', + 'UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id ', + 'FROM `archive_text_data` AS subdata WHERE subdata.value ', o, '?' + )); + SET sNameData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `name_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_name_data` ', CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `name_data` AS subdata WHERE subdata.value ', o, ' ? 
UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_name_data` AS subdata WHERE subdata.value ', o, '?')); + SET sEnumData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `enum_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_enum_data` ', CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `enum_data` AS subdata WHERE subdata.value ', o, ' ? UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_enum_data` AS subdata WHERE subdata.value ', o, '?')); + IF o = "!=" AND refIdsTable IS NOT NULL THEN + SET existence_op = "NOT EXISTS"; + END IF; + SET sRefData = IF(vText IS NULL, + ' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `reference_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_reference_data`', + IF(refIdsTable IS NULL, + NULL, + CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `reference_data` AS subdata WHERE ', existence_op, ' (SELECT 1 FROM `', refIdsTable, '` AS refIdsTable WHERE subdata.value=refIdsTable.id LIMIT 1) AND subdata.status != "REPLACEMENT" UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_reference_data` AS subdata WHERE ', existence_op, ' (SELECT 1 FROM `', refIdsTable, '` AS refIdsTable WHERE subdata.value=refIdsTable.id LIMIT 1) AND subdata.status != "REPLACEMENT"'))); + SET sDoubleData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT subdata.domain_id, subdata.entity_id, _get_head_iversion(subdata.entity_id) AS _iversion, subdata.property_id FROM `double_data` AS subdata UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_double_data` ', IF(vDouble IS 
NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id), property_id FROM `double_data` AS subdata WHERE ', getDoubleWhereClause(vDouble,unit_sig,vDoubleStdUnit,stdUnit_sig,o), ' UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_double_data` AS subdata WHERE ', getDoubleWhereClause(vDouble, unit_sig, vDoubleStdUnit, stdUnit_sig, o)))); + SET sIntData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT subdata.domain_id, subdata.entity_id, _get_head_iversion(subdata.entity_id) AS _iversion, subdata.property_id FROM `integer_data` AS subdata UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_integer_data`', IF(vInt IS NULL AND vDoubleStdUnit IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `integer_data` AS subdata WHERE ', getDoubleWhereClause(vInt, unit_sig, vDoubleStdUnit, stdUnit_sig, o), ' UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_integer_data` AS subdata WHERE ', getDoubleWhereClause(vInt, unit_sig, vDoubleStdUnit, stdUnit_sig, o)))); + SET sDatetimeData = IF(vText IS NULL,' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `datetime_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_datetime_data`', IF(vDateTime IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `datetime_data` AS subdata WHERE ',getDateTimeWhereClause(vDateTime,o), ' UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_datetime_data` AS subdata WHERE ',getDateTimeWhereClause(vDateTime,o)))); + SET sDateData = IF(vText IS NULL,' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `date_data` UNION 
ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_date_data`', IF(vDateTimeDotNotation IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `date_data` AS subdata WHERE ', getDateWhereClause(vDateTimeDotNotation,o), ' UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_date_data` AS subdata WHERE ', getDateWhereClause(vDateTimeDotNotation,o)))); + SET sNullData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id FROM `null_data` UNION ALL SELECT DISTINCT domain_id, entity_id, _iversion, property_id FROM `archive_null_data`', NULL); + + ELSE + SET sTextData = IF(vText IS NULL, 'SELECT DISTINCT domain_id, entity_id, property_id FROM `text_data`', CONCAT('SELECT DISTINCT domain_id, entity_id, property_id FROM `text_data` AS subdata WHERE subdata.value ',o,' ?')); + SET sNameData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `name_data`', CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `name_data` AS subdata WHERE subdata.value ', o, ' ?')); + SET sEnumData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `enum_data`', CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `enum_data` AS subdata WHERE subdata.value ', o, ' ?')); + IF o = "!=" AND refIdsTable IS NOT NULL THEN + SET existence_op = "NOT EXISTS"; + END IF; + + SET sRefData = IF(vText IS NULL, + ' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `reference_data`', + IF(refIdsTable IS NULL, + NULL, + CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `reference_data` AS subdata WHERE ',existence_op ,' (SELECT 1 FROM `', refIdsTable, '` AS refIdsTable WHERE subdata.value=refIdsTable.id LIMIT 1) AND subdata.status != "REPLACEMENT"'))); + 
SET sDoubleData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT subdata.domain_id, subdata.entity_id, subdata.property_id FROM `double_data` AS subdata', IF(vDouble IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `double_data` AS subdata WHERE ', getDoubleWhereClause(vDouble,unit_sig,vDoubleStdUnit,stdUnit_sig,o)))); + SET sIntData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT subdata.domain_id, subdata.entity_id, subdata.property_id FROM `integer_data` AS subdata', IF(vInt IS NULL AND vDoubleStdUnit IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `integer_data` AS subdata WHERE ', getDoubleWhereClause(vInt, unit_sig, vDoubleStdUnit, stdUnit_sig, o)))); + SET sDatetimeData = IF(vText IS NULL,' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data`', IF(vDateTime IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `datetime_data` AS subdata WHERE ',getDateTimeWhereClause(vDateTime,o)))); + SET sDateData = IF(vText IS NULL,' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data`', IF(vDateTimeDotNotation IS NULL, NULL, CONCAT(' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `date_data` AS subdata WHERE ',getDateWhereClause(vDateTimeDotNotation,o)))); + SET sNullData = IF(vText IS NULL, ' UNION ALL SELECT DISTINCT domain_id, entity_id, property_id FROM `null_data`', NULL); + END IF; + + END IF; + + + SET data = CONCAT('(',sTextData, + IF(sNameData IS NULL, '', sNameData), + IF(sEnumData IS NULL, '', sEnumData), + IF(sDoubleData IS NULL, '', sDoubleData), + IF(sIntData IS NULL, '', sIntData), + IF(sDatetimeData IS NULL, '', sDatetimeData), + IF(sDateData IS NULL, '', sDateData), + IF(sRefData IS NULL, '', sRefData), + IF(sNullData IS NULL, '', sNullData), + ')' + ); + + + call createTmpTable(keepTabl, versioned); + IF versioned THEN + + + SET @stmtPOVkeepTblStr = CONCAT( + 'INSERT 
IGNORE INTO `', keepTabl, '` (id, _iversion) SELECT entity_id AS id, _iversion FROM ', data, + ' as data', IF(propertiesTable IS NULL, '', CONCAT( + ' WHERE EXISTS (Select 1 from `', propertiesTable, '` AS prop ', + 'WHERE prop.id = data.property_id AND (prop.id2=data.entity_id OR prop.id2=0))'))); + + IF targetSet IS NOT NULL THEN + SET @stmtPOVStr = CONCAT('INSERT IGNORE INTO `', + targetSet, + '` (id, _iversion) SELECT source.id, source._iversion FROM `', + keepTabl, + '` AS source'); + ELSE + + SET @stmtPOVStr = CONCAT('DELETE FROM `', + sourceSet, + '` WHERE NOT EXISTS (SELECT 1 FROM `', + keepTabl, + '` AS data WHERE data.id=`', + sourceSet, + '`.`id` AND data._iversion=`', + sourceSet, + '`._iversion LIMIT 1)'); + + END IF; + + + PREPARE stmt3 FROM @stmtPOVStr; + PREPARE stmtPOVkeepTbl FROM @stmtPOVkeepTblStr; + IF vText IS NULL THEN + EXECUTE stmtPOVkeepTbl; + ELSE + SET @vText = vText; + EXECUTE stmtPOVkeepTbl USING @vText, @vText, @vText, @vText, @vText, @vText; + END IF; + EXECUTE stmt3; + DEALLOCATE PREPARE stmt3; + DEALLOCATE PREPARE stmtPOVkeepTbl; + ELSE + + SET @stmtPOVkeepTblStr = CONCAT( + 'INSERT IGNORE INTO `', keepTabl, + '` (id) SELECT DISTINCT entity_id AS id FROM ', data, ' as data', + IF(propertiesTable IS NULL, '', + CONCAT(' WHERE EXISTS (Select 1 from `', propertiesTable, + '` AS prop WHERE prop.id = data.property_id AND + (prop.id2=data.entity_id OR prop.id2=0))'))); + + + SET @stmtPOVStr = CONCAT( + IF(targetSet IS NULL, + CONCAT('DELETE FROM `', + sourceSet, + '` WHERE NOT EXISTS (SELECT 1 FROM `'), + CONCAT('INSERT IGNORE INTO `', + targetSet, + '` (id) SELECT id FROM `', + sourceSet, + '` WHERE EXISTS (SELECT 1 FROM `')), + keepTabl, + '` AS data WHERE data.id=`', + sourceSet, + '`.`id` LIMIT 1)' + ); + + + PREPARE stmt3 FROM @stmtPOVStr; + PREPARE stmtPOVkeepTbl FROM @stmtPOVkeepTblStr; + IF vText IS NULL THEN + EXECUTE stmtPOVkeepTbl; + ELSE + SET @vText = vText; + EXECUTE stmtPOVkeepTbl USING @vText, @vText, @vText; + END IF; + 
EXECUTE stmt3; + DEALLOCATE PREPARE stmt3; + DEALLOCATE PREPARE stmtPOVkeepTbl; + END IF; + + SELECT @stmtPOVkeepTblStr as applyPOVStmt1, @stmtPOVStr as applyPOVStmt2, keepTabl as applyPOVIntermediateResultSet; + + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `applyRefPOV` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `applyRefPOV`(in sourceSet VARCHAR(255), in targetSet VARCHAR(255), + in properties VARCHAR(255), in refs VARCHAR(255), + in versioned BOOLEAN) +BEGIN + DECLARE data VARCHAR(20000) DEFAULT CONCAT( + '(SELECT domain_id, entity_id, property_id FROM `reference_data` AS subdata ', + 'WHERE EXISTS (SELECT 1 FROM `', refs, '` AS refs WHERE subdata.value=refs.id LIMIT 1))'); + + IF versioned THEN + SET data = CONCAT( + '(SELECT domain_id, entity_id, _get_head_iversion(entity_id) AS _iversion, property_id ', + 'FROM `reference_data` AS subdata WHERE EXISTS (', + 'SELECT 1 FROM `', refs, '` AS refs WHERE subdata.value=refs.id LIMIT 1) ', + 'UNION ALL SELECT domain_id, entity_id, _iversion, property_id ', + 'FROM `archive_reference_data` AS subdata WHERE EXISTS (', + 'SELECT 1 FROM `', refs, '` AS refs WHERE subdata.value=refs.id LIMIT 1))'); + END IF; + SET @stmtRefPOVStr = 
makeStmt(sourceSet,targetSet,data,properties, versioned); + + PREPARE stmt4 FROM @stmtRefPOVStr; + EXECUTE stmt4; + + SELECT @stmtRefPOVstr as applyRefPOVStmt; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `applySAT` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `applySAT`(in sourceSet VARCHAR(255), in targetSet VARCHAR(255), in loc MEDIUMTEXT, in op CHAR(5)) +BEGIN + + IF targetSet IS NULL OR sourceSet = targetSet THEN + SET @stmtSATString = CONCAT('DELETE FROM `', sourceSet, '` WHERE id NOT IN (SELECT file_id FROM files WHERE path ', op, ' ?)'); + ELSE + SET @stmtSATString = CONCAT('INSERT INTO `', targetSet, '` (id) SELECT data.id FROM `',sourceSet,'` as data WHERE EXISTS (SELECT 1 FROM `files` as f WHERE f.file_id=data.id AND f.path ', op, ' ?)'); + END IF; + PREPARE stmtSAT FROM @stmtSATString; + SET @loc = loc; + EXECUTE stmtSAT USING @loc; + DEALLOCATE PREPARE stmtSAT; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 
'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `applyTransactionFilter` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `applyTransactionFilter`(in sourceSet VARCHAR(255), targetSet VARCHAR(255), in transaction VARCHAR(255), in operator_u CHAR(2), in realm VARCHAR(255), in userName VARCHAR(255), in ilb BIGINT, in ilb_nanos INT UNSIGNED, in eub BIGINT, in eub_nanos INT UNSIGNED, in operator_t CHAR(2)) +BEGIN + DECLARE data TEXT default CONCAT("(SELECT internal_id AS entity_id FROM transaction_log AS t JOIN entity_ids AS eids ON ( t.entity_id = eids.id ) WHERE t.transaction='", + transaction, + "'", + IF(userName IS NOT NULL, + CONCAT(' AND t.realm', operator_u, '? 
AND t.username', operator_u, '?'), + '' + ), + IF(ilb IS NOT NULL, + CONCAT(" AND", constructDateTimeWhereClauseForColumn("t.seconds", "t.nanos", ilb, ilb_nanos, eub, eub_nanos, operator_t)), + "" + ), + ')' + ); + + SET @stmtTransactionStr = makeStmt(sourceSet, targetSet, data, NULL, FALSE); + PREPARE stmtTransactionFilter from @stmtTransactionStr; + IF userName IS NOT NULL THEN + SET @userName = userName; + SET @realm = realm; + EXECUTE stmtTransactionFilter USING @realm, @userName; + ELSE + EXECUTE stmtTransactionFilter; + END IF; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `calcComplementUnion` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `calcComplementUnion`(in targetSet VARCHAR(255), in subResultSet VARCHAR(255), in universe VARCHAR(255), in versioned BOOLEAN) +BEGIN + IF versioned AND universe = "entities" THEN + SET @stmtComplementUnionStr = CONCAT( + 'INSERT IGNORE INTO `', targetSet, + '` SELECT e.id, _get_head_iversion(e.id) FROM entities as e WHERE NOT EXISTS ( SELECT 1 FROM `', + subResultSet, + '` AS diff WHERE diff.id=e.id AND diff._iversion = _get_head_iversion(e.id)) UNION ALL SELECT e.id, e._iversion FROM archive_entities AS e WHERE NOT EXISTS ( SELECT 1 FROM `', + subResultSet, + '` as diff 
WHERE e.id = diff.id AND e._iversion = diff._iversion)'); + ELSEIF versioned THEN + SET @stmtComplementUnionStr = CONCAT( + 'INSERT IGNORE INTO `', targetSet, + '` SELECT id FROM `',universe, + '` AS universe WHERE NOT EXISTS ( SELECT 1 FROM `', + subResultSet,'` + AS diff WHERE diff.id=universe.id AND diff._iversion = universe.id_version)'); + ELSE + SET @stmtComplementUnionStr = CONCAT('INSERT IGNORE INTO `', targetSet, '` SELECT id FROM `',universe, '` AS universe WHERE NOT EXISTS ( SELECT 1 FROM `', subResultSet,'` AS diff WHERE diff.id=universe.id)'); + END IF; + PREPARE stmtComplementUnion FROM @stmtComplementUnionStr; + EXECUTE stmtComplementUnion; + DEALLOCATE PREPARE stmtComplementUnion; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `calcDifference` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `calcDifference`(in resultSetTable VARCHAR(255), in diff VARCHAR(255), in versioned BOOLEAN) +BEGIN + IF versioned THEN + SET @diffStmtStr = CONCAT('DELETE FROM `', resultSetTable, '` WHERE EXISTS ( SELECT 1 FROM `', diff,'` AS diff WHERE diff.id=`',resultSetTable,'`.`id` AND diff._iversion=`', resultSetTable, '`.`_iversion`)'); + ELSE + SET @diffStmtStr = CONCAT('DELETE FROM `', resultSetTable, '` WHERE EXISTS ( 
SELECT 1 FROM `', diff,'` AS diff WHERE diff.id=`',resultSetTable,'`.`id`)'); + END IF; + PREPARE diffStmt FROM @diffStmtStr; + EXECUTE diffStmt; + DEALLOCATE PREPARE diffStmt; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `calcIntersection` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `calcIntersection`(in resultSetTable VARCHAR(255), in intersectWith VARCHAR(255), in versioned BOOLEAN) +BEGIN + IF versioned THEN + SET @diffStmtStr = CONCAT('DELETE FROM `', + resultSetTable, + '` WHERE NOT EXISTS ( SELECT 1 FROM `', + intersectWith, + '` AS diff WHERE diff.id=`', + resultSetTable, + '`.`id` AND diff._iversion=`', + resultSetTable, + '`.`_iversion`)'); + ELSE + SET @diffStmtStr = CONCAT('DELETE FROM `', resultSetTable, '` WHERE NOT EXISTS ( SELECT 1 FROM `', intersectWith,'` AS diff WHERE diff.id=`',resultSetTable,'`.`id`)'); + END IF; + PREPARE diffStmt FROM @diffStmtStr; + EXECUTE diffStmt; + + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 
'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `calcUnion` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `calcUnion`(in targetSet VARCHAR(255), in sourceSet VARCHAR(255)) +BEGIN + SET @diffStmtStr = CONCAT('INSERT IGNORE INTO `', targetSet, '` SELECT * FROM `',sourceSet,'`'); + PREPARE diffStmt FROM @diffStmtStr; + EXECUTE diffStmt; + DEALLOCATE PREPARE diffStmt; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `cleanUpLinCon` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `cleanUpLinCon`() +BEGIN + + DELETE FROM units_lin_con WHERE NOT EXISTS (SELECT '1' FROM double_data WHERE unit_sig=signature_from) AND NOT EXISTS (SELECT '1' FROM integer_data WHERE unit_sig=signature_from); + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = 
@saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `cleanUpQuery` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `cleanUpQuery`() +BEGIN + CREATE TEMPORARY TABLE IF NOT EXISTS warnings (warning TEXT NOT NULL); + SELECT * from warnings; + + SET @pstmtstr = CONCAT('DROP TEMPORARY TABLE IF EXISTS `warnings`', + IF(@tempTableList IS NULL, '', CONCAT(',',@tempTableList))); + PREPARE pstmt FROM @pstmtstr; + EXECUTE pstmt; + + SET @tempTableList = NULL; +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `copyTable` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `copyTable`(in fromTable 
VARCHAR(255), in toTable VARCHAR(255)) +BEGIN + SET @copyTableStmtStr = CONCAT('INSERT IGNORE INTO `', toTable, '` (id) SELECT id FROM `', fromTable, '`'); + PREPARE copyTableStmt FROM @copyTableStmtStr; + EXECUTE copyTableStmt; + DEALLOCATE PREPARE copyTableStmt; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `createTmpTable` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `createTmpTable`(out newTableName VARCHAR(255), in versioned BOOLEAN) +BEGIN + call registerTempTableName(newTableName); + + IF versioned THEN + SET @createTableStmtStr = CONCAT('CREATE TEMPORARY TABLE `', newTableName, + '` ( id INT UNSIGNED, _iversion INT UNSIGNED, PRIMARY KEY (id, _iversion))' ); + ELSE + SET @createTableStmtStr = CONCAT('CREATE TEMPORARY TABLE `', newTableName,'` ( id INT UNSIGNED PRIMARY KEY)' ); + END IF; + + PREPARE createTableStmt FROM @createTableStmtStr; + EXECUTE createTableStmt; + DEALLOCATE PREPARE createTableStmt; +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 
SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `createTmpTable2` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `createTmpTable2`(out newTableName VARCHAR(255)) +BEGIN + call registerTempTableName(newTableName); + SET @createTableStmtStr = CONCAT('CREATE TEMPORARY TABLE `', newTableName, + '` ( id INT UNSIGNED, id2 INT UNSIGNED, domain INT UNSIGNED, CONSTRAINT `', + newTableName,'PK` PRIMARY KEY (id,id2,domain) )' ); + + PREPARE createTableStmt FROM @createTableStmtStr; + EXECUTE createTableStmt; + DEALLOCATE PREPARE createTableStmt; +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `deleteEntity` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `deleteEntity`(in EntityID VARCHAR(255)) +BEGIN + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + + SELECT internal_id INTO 
InternalEntityID from entity_ids WHERE id = EntityID; + + + DELETE FROM files where file_id=InternalEntityID; + + + DELETE FROM data_type + WHERE ( domain_id = 0 + AND entity_id = 0 + AND property_id = InternalEntityID ) + OR datatype = InternalEntityID; + DELETE FROM collection_type + WHERE domain_id = 0 + AND entity_id = 0 + AND property_id = InternalEntityID; + + + DELETE FROM name_data + WHERE domain_id = 0 + AND entity_id = InternalEntityID + AND property_id = 20; + + DELETE FROM entity_ids + WHERE internal_id = InternalEntityID; + + DELETE FROM entities where id=InternalEntityID; + + + DELETE FROM entity_acl + WHERE NOT EXISTS ( + SELECT 1 FROM entities + WHERE entities.acl = entity_acl.id LIMIT 1) + AND NOT EXISTS ( + SELECT 1 FROM archive_entities + WHERE archive_entities.acl = entity_acl.id LIMIT 1); + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `deleteEntityProperties` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `deleteEntityProperties`(in EntityID VARCHAR(255)) +BEGIN + DECLARE IVersion INT UNSIGNED DEFAULT NULL; + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + + SELECT internal_id INTO InternalEntityID from entity_ids WHERE id = EntityID; + + CALL deleteIsa(InternalEntityID); + + IF 
is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN + SELECT max(e._iversion) INTO IVersion + FROM entity_version AS e + WHERE e.entity_id = InternalEntityID; + + + INSERT INTO archive_reference_data (domain_id, entity_id, + property_id, value, value_iversion, status, pidx, _iversion) + SELECT domain_id, entity_id, property_id, value, value_iversion, + status, pidx, IVersion AS _iversion + FROM reference_data + WHERE (domain_id = 0 AND entity_id = InternalEntityID) + OR domain_id = InternalEntityID; + + INSERT INTO archive_null_data (domain_id, entity_id, + property_id, status, pidx, _iversion) + SELECT domain_id, entity_id, property_id, status, + pidx, IVersion AS _iversion + FROM null_data + WHERE (domain_id = 0 AND entity_id = InternalEntityID) + OR domain_id = InternalEntityID; + + INSERT INTO archive_text_data (domain_id, entity_id, + property_id, value, status, pidx, _iversion) + SELECT domain_id, entity_id, property_id, value, status, + pidx, IVersion AS _iversion + FROM text_data + WHERE (domain_id = 0 AND entity_id = InternalEntityID) + OR domain_id = InternalEntityID; + + INSERT INTO archive_name_data (domain_id, entity_id, + property_id, value, status, pidx, _iversion) + SELECT domain_id, entity_id, property_id, value, status, + pidx, IVersion AS _iversion + FROM name_data + WHERE (domain_id = 0 AND entity_id = InternalEntityID) + OR domain_id = InternalEntityID; + + INSERT INTO archive_enum_data (domain_id, entity_id, + property_id, value, status, pidx, _iversion) + SELECT domain_id, entity_id, property_id, value, status, + pidx, IVersion AS _iversion + FROM enum_data + WHERE (domain_id = 0 AND entity_id = InternalEntityID) + OR domain_id = InternalEntityID; + + INSERT INTO archive_integer_data (domain_id, entity_id, + property_id, value, status, pidx, _iversion, unit_sig) + SELECT domain_id, entity_id, property_id, value, status, + pidx, IVersion AS _iversion, unit_sig + FROM integer_data + WHERE (domain_id = 0 AND entity_id = InternalEntityID) + OR 
domain_id = InternalEntityID; + + INSERT INTO archive_double_data (domain_id, entity_id, + property_id, value, status, pidx, _iversion, unit_sig) + SELECT domain_id, entity_id, property_id, value, status, + pidx, IVersion AS _iversion, unit_sig + FROM double_data + WHERE (domain_id = 0 AND entity_id = InternalEntityID) + OR domain_id = InternalEntityID; + + INSERT INTO archive_datetime_data (domain_id, entity_id, + property_id, value, value_ns, status, pidx, _iversion) + SELECT domain_id, entity_id, property_id, value, value_ns, + status, pidx, IVersion AS _iversion + FROM datetime_data + WHERE (domain_id = 0 AND entity_id = InternalEntityID) + OR domain_id = InternalEntityID; + + INSERT INTO archive_date_data (domain_id, entity_id, + property_id, value, status, pidx, _iversion) + SELECT domain_id, entity_id, property_id, value, status, + pidx, IVersion AS _iversion + FROM date_data + WHERE (domain_id = 0 AND entity_id = InternalEntityID) + OR domain_id = InternalEntityID; + + INSERT INTO archive_name_overrides (domain_id, entity_id, + property_id, name, _iversion) + SELECT domain_id, entity_id, property_id, name, + IVersion AS _iversion + FROM name_overrides + WHERE (domain_id = 0 AND entity_id = InternalEntityID) + OR domain_id = InternalEntityID; + + INSERT INTO archive_desc_overrides (domain_id, entity_id, + property_id, description, _iversion) + SELECT domain_id, entity_id, property_id, description, + IVersion AS _iversion + FROM desc_overrides + WHERE (domain_id = 0 AND entity_id = InternalEntityID) + OR domain_id = InternalEntityID; + + INSERT INTO archive_data_type (domain_id, entity_id, + property_id, datatype, _iversion) + SELECT domain_id, entity_id, property_id, datatype, + IVersion AS _iversion + FROM data_type + WHERE (domain_id = 0 AND entity_id = InternalEntityID) + OR domain_id = InternalEntityID; + + INSERT INTO archive_collection_type (domain_id, entity_id, + property_id, collection, _iversion) + SELECT domain_id, entity_id, property_id, 
collection, + IVersion AS _iversion + FROM collection_type + WHERE (domain_id = 0 AND entity_id = InternalEntityID) + OR domain_id = InternalEntityID; + + INSERT INTO archive_query_template_def (id, definition, _iversion) + SELECT id, definition, IVersion AS _iversion + FROM query_template_def + WHERE id = InternalEntityID; + + END IF; + + DELETE FROM reference_data + where (domain_id=0 AND entity_id=InternalEntityID) OR domain_id=InternalEntityID; + DELETE FROM null_data + where (domain_id=0 AND entity_id=InternalEntityID) OR domain_id=InternalEntityID; + DELETE FROM text_data + where (domain_id=0 AND entity_id=InternalEntityID) OR domain_id=InternalEntityID; + DELETE FROM name_data + where (domain_id=0 AND entity_id=InternalEntityID) OR domain_id=InternalEntityID; + DELETE FROM enum_data + where (domain_id=0 AND entity_id=InternalEntityID) OR domain_id=InternalEntityID; + DELETE FROM integer_data + where (domain_id=0 AND entity_id=InternalEntityID) OR domain_id=InternalEntityID; + DELETE FROM double_data + where (domain_id=0 AND entity_id=InternalEntityID) OR domain_id=InternalEntityID; + DELETE FROM datetime_data + where (domain_id=0 AND entity_id=InternalEntityID) OR domain_id=InternalEntityID; + DELETE FROM date_data + where (domain_id=0 AND entity_id=InternalEntityID) OR domain_id=InternalEntityID; + + DELETE FROM name_overrides + WHERE (domain_id=0 AND entity_id=InternalEntityID) OR domain_id=InternalEntityID; + DELETE FROM desc_overrides + WHERE (domain_id=0 AND entity_id=InternalEntityID) OR domain_id=InternalEntityID; + + DELETE FROM data_type + WHERE (domain_id=0 AND entity_id=InternalEntityID) OR domain_id=InternalEntityID; + DELETE FROM collection_type + WHERE (domain_id=0 AND entity_id=InternalEntityID) OR domain_id=InternalEntityID; + + DELETE FROM query_template_def WHERE id=InternalEntityID; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET 
character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `deleteIsa` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `deleteIsa`(IN InternalEntityID INT UNSIGNED) +BEGIN + DECLARE IVersion INT UNSIGNED DEFAULT NULL; + + IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN + SELECT max(_iversion) INTO IVersion + FROM entity_version + WHERE entity_id = InternalEntityID; + + + INSERT IGNORE INTO archive_isa (child, child_iversion, parent, direct) + SELECT e.child, IVersion AS child_iversion, e.parent, rpath = InternalEntityID + FROM isa_cache AS e + WHERE e.child = InternalEntityID; + END IF; + + DELETE FROM isa_cache + WHERE child = InternalEntityID + OR rpath = InternalEntityID + OR rpath LIKE concat('%>', InternalEntityID) + OR rpath LIKE concat('%>', InternalEntityID, '>%'); + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `deleteLinCon` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ 
; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `deleteLinCon`(in sig BIGINT) +BEGIN + + DELETE FROM units_lin_con WHERE signature_from=sig; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `entityACL` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `entityACL`(out ACLID INT UNSIGNED, in ACLSTR VARBINARY(65525)) +BEGIN + SELECT id INTO ACLID FROM entity_acl as t WHERE t.acl=ACLSTR LIMIT 1; + IF ACLID IS NULL THEN + INSERT INTO entity_acl (acl) VALUES (ACLSTR); + SET ACLID = LAST_INSERT_ID(); + END IF; +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `finishNegationFilter` */; +/*!50003 SET 
@saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `finishNegationFilter`(in resultSetTable VARCHAR(255), in diff VARCHAR(255)) +BEGIN + call calcDifference(resultSetTable, diff); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `finishSubProperty` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `finishSubProperty`(in sourceSet VARCHAR(255),in targetSet VARCHAR(255), + in list VARCHAR(255), in versioned BOOLEAN) +BEGIN + DECLARE data VARCHAR(20000) DEFAULT CONCAT('`',list,'`'); + SET @finishSubPropertyStmtStr = makeStmt(sourceSet, targetSet, data, NULL, versioned); + + PREPARE finishSubPropertyStmt FROM @finishSubPropertyStmtStr; + EXECUTE finishSubPropertyStmt; + DEALLOCATE PREPARE finishSubPropertyStmt; + + SELECT @finishSubPropertyStmtStr AS finishSubPropertyStmt; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; 
+/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `getChildren` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `getChildren`(in tableName varchar(255), in versioned BOOLEAN) +BEGIN + DECLARE found_children INT UNSIGNED DEFAULT 0; + + DROP TEMPORARY TABLE IF EXISTS dependTemp; + CREATE TEMPORARY TABLE dependTemp (id INT UNSIGNED, _iversion INT UNSIGNED, PRIMARY KEY(id, _iversion)); + + + SET @initDepend = CONCAT( + 'INSERT IGNORE INTO dependTemp (id, _iversion) SELECT i.child, ', + IF(versioned, + '_get_head_iversion(i.child)', + '0'), + ' FROM isa_cache AS i INNER JOIN `', + tableName, + '` AS t ON (i.parent=t.id);'); + PREPARE initDependStmt FROM @initDepend; + + EXECUTE initDependStmt; + SET found_children = found_children + ROW_COUNT(); + + + + IF versioned IS TRUE THEN + SET @initDepend = CONCAT( + 'INSERT IGNORE INTO dependTemp (id, _iversion) ', + 'SELECT i.child, i.child_iversion FROM archive_isa AS i INNER JOIN `', + tableName, + '` AS t ON (i.parent=t.id);'); + PREPARE initDependStmt FROM @initDepend; + + EXECUTE initDependStmt; + SET found_children = found_children + ROW_COUNT(); + END IF; + + + + + IF found_children != 0 THEN + SET @transfer = CONCAT( + 'INSERT IGNORE INTO `', + tableName, + IF(versioned, + '` (id, _iversion) SELECT id, _iversion FROM dependTemp', + '` (id) SELECT id FROM dependTemp')); + PREPARE transferstmt FROM 
@transfer; + EXECUTE transferstmt; + DEALLOCATE PREPARE transferstmt; + END IF; + + + DEALLOCATE PREPARE initDependStmt; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `getDependentEntities` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `getDependentEntities`(in EntityID VARCHAR(255)) +BEGIN + + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + + DROP TEMPORARY TABLE IF EXISTS referring; + CREATE TEMPORARY TABLE referring ( + id INT UNSIGNED UNIQUE + ); + + SELECT internal_id INTO InternalEntityID from entity_ids WHERE id = EntityID; + + INSERT IGNORE INTO referring (id) SELECT entity_id FROM reference_data WHERE (value=InternalEntityID OR property_id=InternalEntityID) AND domain_id=0 AND entity_id!=InternalEntityID; + INSERT IGNORE INTO referring (id) SELECT domain_id FROM reference_data WHERE (value=InternalEntityID OR property_id=InternalEntityID) AND domain_id!=InternalEntityID AND entity_id!=InternalEntityID AND domain_id!=0; + + INSERT IGNORE INTO referring (id) SELECT entity_id FROM text_data WHERE property_id=InternalEntityID AND domain_id=0 AND entity_id!=InternalEntityID; + INSERT IGNORE INTO referring (id) SELECT domain_id FROM text_data WHERE property_id=InternalEntityID AND 
domain_id!=InternalEntityID AND entity_id!=InternalEntityID AND domain_id!=0; + + INSERT IGNORE INTO referring (id) SELECT entity_id FROM enum_data WHERE property_id=InternalEntityID AND domain_id=0 AND entity_id!=InternalEntityID; + INSERT IGNORE INTO referring (id) SELECT domain_id FROM enum_data WHERE property_id=InternalEntityID AND domain_id!=InternalEntityID AND entity_id!=InternalEntityID AND domain_id!=0; + + INSERT IGNORE INTO referring (id) SELECT entity_id FROM name_data WHERE property_id=InternalEntityID AND domain_id=0 AND entity_id!=InternalEntityID; + INSERT IGNORE INTO referring (id) SELECT domain_id FROM name_data WHERE property_id=InternalEntityID AND domain_id!=InternalEntityID AND entity_id!=InternalEntityID AND domain_id!=0; + + INSERT IGNORE INTO referring (id) SELECT entity_id FROM integer_data WHERE property_id=InternalEntityID AND domain_id=0 AND entity_id!=InternalEntityID; + INSERT IGNORE INTO referring (id) SELECT domain_id FROM integer_data WHERE property_id=InternalEntityID AND domain_id!=InternalEntityID AND entity_id!=InternalEntityID AND domain_id!=0; + + INSERT IGNORE INTO referring (id) SELECT entity_id FROM double_data WHERE property_id=InternalEntityID AND domain_id=0 AND entity_id!=InternalEntityID; + INSERT IGNORE INTO referring (id) SELECT domain_id FROM double_data WHERE property_id=InternalEntityID AND domain_id!=InternalEntityID AND entity_id!=InternalEntityID AND domain_id!=0; + + INSERT IGNORE INTO referring (id) SELECT entity_id FROM datetime_data WHERE property_id=InternalEntityID AND domain_id=0 AND entity_id!=InternalEntityID; + INSERT IGNORE INTO referring (id) SELECT domain_id FROM datetime_data WHERE property_id=InternalEntityID AND domain_id!=InternalEntityID AND entity_id!=InternalEntityID AND domain_id!=0; + + INSERT IGNORE INTO referring (id) SELECT entity_id FROM date_data WHERE property_id=InternalEntityID AND domain_id=0 AND entity_id!=InternalEntityID; + INSERT IGNORE INTO referring (id) SELECT domain_id 
FROM date_data WHERE property_id=InternalEntityID AND domain_id!=InternalEntityID AND entity_id!=InternalEntityID AND domain_id!=0; + + INSERT IGNORE INTO referring (id) SELECT entity_id FROM null_data WHERE property_id=InternalEntityID AND domain_id=0 AND entity_id!=InternalEntityID; + INSERT IGNORE INTO referring (id) SELECT domain_id FROM null_data WHERE property_id=InternalEntityID AND domain_id!=InternalEntityID AND entity_id!=InternalEntityID AND domain_id!=0; + + INSERT IGNORE INTO referring (id) SELECT entity_id from data_type WHERE datatype=InternalEntityID AND domain_id=0 AND entity_id!=InternalEntityID; + INSERT IGNORE INTO referring (id) SELECT domain_id from data_type WHERE datatype=InternalEntityID; + + INSERT IGNORE INTO referring (id) SELECT child FROM isa_cache WHERE parent = InternalEntityID AND rpath = child; + + SELECT e.id FROM referring AS r LEFT JOIN entity_ids AS e ON r.id = e.internal_id WHERE r.id!=0 AND e.internal_id!=InternalEntityID; + + DROP TEMPORARY TABLE referring; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `getFile` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8mb4 */ ; +/*!50003 SET character_set_results = utf8mb4 */ ; +/*!50003 SET collation_connection = utf8mb4_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`caosdb`@`%` PROCEDURE `getFile`(in FileID INT) +BEGIN + +Select name, description, role into @name, @description, @role from 
entities where id=FileID LIMIT 1; + +IF @role = 'file' Then + Select path, hash, size into @FilePath, @FileHash, @FileSize from files where file_id=FileID LIMIT 1; + Select timestamp, user_id, user_agent into @FileCreated, @FileCreator, @FileGenerator from history where entity_id=FileID AND event='insertion' LIMIT 1; + +-- FIX: the description was selected into @description above; @FileDescription was never set. +Select +FileID as FileID, +@FilePath as FilePath, +@FileSize as FileSize, +@FileHash as FileHash, +@description as FileDescription, +@FileCreated as FileCreated, +@FileCreator as FileCreator, +@FileGenerator as FileGenerator, +NULL as FileOwner, +NULL as FilePermission, +NULL as FileChecksum; + +END IF; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `getFileIdByPath` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `getFileIdByPath`(in FilePath TEXT) +BEGIN + + -- Resolve a file's external entity id from its path. + -- FIX: entity_ids must carry the alias `e` referenced in the select list, + -- and the join column is `internal_id` (cf. getDependentEntities, insertEntity), + -- not the nonexistent `internal_in`. + SELECT e.id AS FileID FROM files AS f LEFT JOIN entity_ids AS e ON e.internal_id = f.file_id WHERE f.path=FilePath LIMIT 1; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode =
'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `getIdByName` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `getIdByName`(in Name VARCHAR(255), in Role VARCHAR(255), in Lmt INT UNSIGNED) +BEGIN + + SET @stmtStr = "SELECT e.id AS id FROM name_data AS n JOIN entity_ids AS e ON (n.domain_id=0 AND n.property_id=20 AND e.internal_id = n.entity_id) JOIN entities AS i ON (i.id = e.internal_id) WHERE n.value = ?"; + + IF Role IS NULL THEN + SET @stmtStr = CONCAT(@stmtStr, " AND i.role!='ROLE'"); + ELSE + SET @stmtStr = CONCAT(@stmtStr, " AND i.role='", Role, "'"); + END IF; + + IF Lmt IS NOT NULL THEN + SET @stmtStr = CONCAT(@stmtStr, " LIMIT ", Lmt); + END IF; + + SET @vName = Name; + PREPARE stmt FROM @stmtStr; + EXECUTE stmt USING @vName; + DEALLOCATE PREPARE stmt; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `getRules` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8mb4 */ ; +/*!50003 SET character_set_results = utf8mb4 */ ; +/*!50003 SET 
collation_connection = utf8mb4_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`caosdb`@`%` PROCEDURE `getRules`(in DomainID INT UNSIGNED, in EntityID INT UNSIGNED, in TransType VARCHAR(255)) +BEGIN + + + + +SELECT rules.transaction, rules.criterion, rules.modus from rules where if(DomainID is null, rules.domain_id=0,rules.domain_id=DomainID) AND if(EntityID is null, rules.entity_id=0,rules.entity_id=EntityID) AND if(TransType is null,true=true,rules.transaction=TransType); + + + + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `get_version_history` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `get_version_history`( + in EntityID VARCHAR(255)) +BEGIN + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + + SELECT internal_id INTO InternalEntityID FROM entity_ids WHERE id = EntityID; + + + SELECT c.version AS child, + NULL as parent, + t.seconds AS child_seconds, + t.nanos AS child_nanos, + t.username AS child_username, + t.realm AS child_realm + FROM entity_version AS c INNER JOIN transactions as t + ON ( c.srid = t.srid ) + WHERE c.entity_id = InternalEntityID + AND c._ipparent is Null + + + + + + + UNION SELECT c.version AS child, + p.version AS parent, + t.seconds AS child_seconds, + t.nanos AS child_nanos, + 
t.username AS child_username, + t.realm AS child_realm + FROM entity_version AS p + INNER JOIN entity_version as c + INNER JOIN transactions AS t + ON (c._ipparent = p._iversion + AND c.entity_id = p.entity_id + AND t.srid = c.srid) + WHERE p.entity_id = InternalEntityID; +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `initBackReference` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `initBackReference`(in PropertyID VARCHAR(255), in PropertyName VARCHAR(255), in EntityID VARCHAR(255), in EntityName VARCHAR(255)) +BEGIN + DECLARE propertiesTable VARCHAR(255) DEFAULT NULL; + DECLARE entitiesTable VARCHAR(255) DEFAULT NULL; + + IF PropertyName IS NOT NULL THEN + + call createTmpTable(propertiesTable, FALSE); + call initSubEntity(PropertyID, PropertyName, propertiesTable); + END IF; + + IF EntityName IS NOT NULL THEN + + call createTmpTable(entitiesTable, FALSE); + call initSubEntity(EntityID, EntityName, entitiesTable); + END IF; + + SELECT propertiesTable, entitiesTable; + + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; 
+/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `initDisjunctionFilter` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `initDisjunctionFilter`(in versioned BOOLEAN) +BEGIN + call initEmptyTargetSet(NULL, versioned); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `initEmptyTargetSet` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `initEmptyTargetSet`(in targetSet VARCHAR(255), in versioned BOOLEAN) +BEGIN + DECLARE newTableName VARCHAR(255) DEFAULT targetSet; + IF targetSet IS NOT NULL THEN + SET @isNotEmptyVar = NULL; + SET @isEmptyStmtStr = CONCAT("SELECT 1 INTO @isNotEmptyVar FROM `",targetSet,"` LIMIT 1"); + PREPARE stmtIsNotEmpty FROM @isEmptyStmtStr; + EXECUTE stmtIsNotEmpty; + DEALLOCATE PREPARE stmtIsNotEmpty; + IF 
@isNotEmptyVar IS NOT NULL THEN + call createTmpTable(newTableName, versioned); + END IF; + ELSE + call createTmpTable(newTableName, versioned); + END IF; + SELECT newTableName AS newTableName; +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `initEntity` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `initEntity`(in eid VARCHAR(255), in ename VARCHAR(255), + in enameLike VARCHAR(255), in enameRegexp VARCHAR(255), + in resultset VARCHAR(255), in versioned BOOLEAN) +initEntityLabel: BEGIN + DECLARE select_columns VARCHAR(255) DEFAULT '` (id) SELECT entity_id FROM name_data '; + SET @initEntityStmtStr = NULL; + + + + IF versioned IS TRUE THEN + SET select_columns = '` (id, _iversion) SELECT entity_id, _get_head_iversion(entity_id) FROM name_data '; + END IF; + IF ename IS NOT NULL THEN + SET @initEntityStmtStr = CONCAT( + 'INSERT IGNORE INTO `', + resultset, + select_columns, + 'WHERE value=?; '); + SET @query_param = ename; + ELSEIF enameLike IS NOT NULL THEN + SET @initEntityStmtStr = CONCAT( + 'INSERT IGNORE INTO `', + resultset, + select_columns, + 'WHERE value LIKE ?;'); + SET @query_param = enameLike; + ELSEIF enameRegexp IS NOT NULL THEN + SET @initEntityStmtStr = CONCAT( + 'INSERT IGNORE INTO `', + resultset, + 
select_columns, + 'WHERE value REGEXP ?;'); + SET @query_param = enameRegexp; + END IF; + + + IF @initEntityStmtStr IS NOT NULL THEN + PREPARE initEntityStmt FROM @initEntityStmtStr; + EXECUTE initEntityStmt USING @query_param; + DEALLOCATE PREPARE initEntityStmt; + END IF; + + IF eid IS NOT NULL THEN + + SET @initEntityStmtStr = CONCAT( + 'INSERT IGNORE INTO `', + resultset, + IF(versioned, + '` (id, _iversion) SELECT eids.internal_id, _get_head_iversion(eids.internal_id) ', + '` (id) SELECT eids.internal_id '), + 'FROM entity_ids AS eids WHERE eids.id=',eid,';'); + PREPARE initEntityStmt FROM @initEntityStmtStr; + EXECUTE initEntityStmt; + DEALLOCATE PREPARE initEntityStmt; + END IF; + + + + + IF versioned IS TRUE THEN + SET select_columns = '` (id, _iversion) SELECT entity_id, _iversion FROM archive_name_data '; + IF ename IS NOT NULL THEN + SET @initEntityStmtStr = CONCAT( + 'INSERT IGNORE INTO `', + resultset, + select_columns, + 'WHERE value=?; '); + SET @query_param = ename; + ELSEIF enameLike IS NOT NULL THEN + SET @initEntityStmtStr = CONCAT( + 'INSERT IGNORE INTO `', + resultset, + select_columns, + 'WHERE value LIKE ?;'); + SET @query_param = enameLike; + ELSEIF enameRegexp IS NOT NULL THEN + -- FIX: select_columns was missing here, yielding malformed SQL + -- ('INSERT IGNORE INTO `<tbl>WHERE value REGEXP ?;'); both sibling + -- branches (ename / enameLike) and the unversioned section include it. + SET @initEntityStmtStr = CONCAT( + 'INSERT IGNORE INTO `', + resultset, + select_columns, + 'WHERE value REGEXP ?;'); + SET @query_param = enameRegexp; + END IF; + + + IF @initEntityStmtStr IS NOT NULL THEN + PREPARE initEntityStmt FROM @initEntityStmtStr; + EXECUTE initEntityStmt USING @query_param; + DEALLOCATE PREPARE initEntityStmt; + END IF; + END IF; + + + + IF @initEntityStmtStr IS NOT NULL THEN + call getChildren(resultset, versioned); + END IF; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode =
'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `initPOVPropertiesTable` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `initPOVPropertiesTable`(in PropertyID VARCHAR(255), in PropertyName VARCHAR(255), in sourceSet VARCHAR(255)) +BEGIN + DECLARE propertiesTable VARCHAR(255) DEFAULT NULL; + DECLARE replTbl VARCHAR(255) DEFAULT NULL; + DECLARE ecount INT DEFAULT 0; + DECLARE t1 BIGINT DEFAULT 0; + DECLARE t2 BIGINT DEFAULT 0; + DECLARE t3 BIGINT DEFAULT 0; + DECLARE t4 BIGINT DEFAULT 0; + DECLARE t5 BIGINT DEFAULT 0; + DECLARE t6 BIGINT DEFAULT 0; + + + IF PropertyName is NOT NULL THEN + SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t1 from (select uuid() uid) as alias; + call createTmpTable2(propertiesTable); + + + + + + + + + + SET @initPOVPropertiesTableStmt1 = CONCAT('INSERT IGNORE INTO `', propertiesTable, '` (id, id2, domain) SELECT property_id, entity_id, domain_id from name_overrides WHERE name = ? 
UNION ALL SELECT entity_id, domain_id, 0 FROM name_data WHERE value = ?;'); + PREPARE stmt FROM @initPOVPropertiesTableStmt1; + SET @PropertyName = PropertyName; + EXECUTE stmt USING @PropertyName, @PropertyName; + SET ecount = ROW_COUNT(); + + + SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t2 from (select uuid() uid) as alias; + IF PropertyID IS NOT NULL THEN + SET @initPOVPropertiesTableStmt2 = CONCAT('INSERT IGNORE INTO `', propertiesTable, '` (id, id2, domain) VALUES (?, 0, 0)'); + PREPARE stmt FROM @initPOVPropertiesTableStmt2; + SET @PropertyID = PropertyID; + EXECUTE stmt USING @PropertyID; + SET ecount = ecount + ROW_COUNT(); + END IF; + + + SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t3 from (select uuid() uid) as alias; + IF ecount > 0 THEN + + call getChildren(propertiesTable, False); + END IF; + + + SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t4 from (select uuid() uid) as alias; + IF ecount > 0 THEN + call createTmpTable2(replTbl); + SET @replTblStmt1 := CONCAT('INSERT IGNORE INTO `',replTbl, '` (id, id2, domain) SELECT r.value as id, r.entity_id as id2, 0 as domain_id FROM reference_data AS r WHERE status="REPLACEMENT" AND domain_id=0 AND EXISTS (SELECT * FROM `', sourceSet, '` AS s WHERE s.id=r.entity_id) AND EXISTS (SELECT * FROM `', propertiesTable, '` AS p WHERE p.domain = 0 AND p.id2=0 AND p.id=r.property_id);'); + PREPARE replStmt1 FROM @replTblStmt1; + EXECUTE replStmt1; + DEALLOCATE PREPARE replStmt1; + SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t5 from (select uuid() uid) as alias; + + SET @replTblStmt2 := 
CONCAT('INSERT IGNORE INTO `', propertiesTable, '` SELECT id, id2, domain FROM `', replTbl, '`;'); + PREPARE replStmt2 FROM @replTblStmt2; + EXECUTE replStmt2; + DEALLOCATE PREPARE replStmt2; + SELECT conv( concat( substring(uid,16,3), substring(uid,10,4), substring(uid,1,8)),16,10) div 10000 - (141427 * 24 * 60 * 60 * 1000) as current_mills INTO t6 from (select uuid() uid) as alias; + END IF; + END IF; + SELECT propertiesTable, t1, t2, t3, t4, t5, t6, @initPOVPropertiesTableStmt1 as initPOVPropertiesTableStmt1, @initPOVPropertiesTableStmt2 as initPOVPropertiesTableStmt2, @replTblStmt1 as replTblStmt1, @replTblStmt2 as replTblStmt2; +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `initPOVRefidsTable` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `initPOVRefidsTable`(in PropertyID VARCHAR(255), in PropertyName VARCHAR(255)) +BEGIN + DECLARE refIdsTable VARCHAR(255) DEFAULT NULL; + + + IF PropertyName IS NOT NULL THEN + + call createTmpTable(refIdsTable, FALSE); + call initSubEntity(PropertyID, PropertyName, refIdsTable); + + END IF; + SELECT refIdsTable; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = 
@saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `initQuery` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `initQuery`(in versioned BOOLEAN) +BEGIN + CREATE TEMPORARY TABLE IF NOT EXISTS warnings (warning TEXT NOT NULL); + + call createTmpTable(@resultSet, versioned); + SELECT @resultSet as tablename; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `initSubEntity` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `initSubEntity`(in EntityID VARCHAR(255), in ename VARCHAR(255), in tableName VARCHAR(255)) +BEGIN + DECLARE ecount INT DEFAULT 0; + DECLARE op VARCHAR(255) DEFAULT '='; + + + IF LOCATE("%", ename) > 0 THEN + SET op = "LIKE"; + 
END IF; + + SET @stmtStr = CONCAT('INSERT IGNORE INTO `', + tableName, + '` (id) SELECT entity_id FROM name_data WHERE value ', + op, + ' ? AND domain_id=0;'); + + PREPARE stmt FROM @stmtStr; + SET @ename = ename; + EXECUTE stmt USING @ename; + SET ecount = ROW_COUNT(); + DEALLOCATE PREPARE stmt; + + IF EntityID IS NOT NULL THEN + SET @stmtStr = CONCAT('INSERT IGNORE INTO `', tableName, '` (id) SELECT internal_id FROM entity_ids WHERE id = ?'); + PREPARE stmt FROM @stmtStr; + SET @eid = EntityID; + EXECUTE stmt USING @eid; + SET ecount = ecount + ROW_COUNT(); + DEALLOCATE PREPARE stmt; + END IF; + + IF ecount > 0 THEN + + call getChildren(tableName, False); + END IF; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `initSubProperty` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `initSubProperty`(in sourceSet VARCHAR(255), in propertiesTable VARCHAR(255), in refIdsTable VARCHAR(255)) +BEGIN +DECLARE newTableName VARCHAR(255) DEFAULT NULL; + call registerTempTableName(newTableName); + + SET @createSubPropertyListTableStr = CONCAT('CREATE TEMPORARY TABLE `', newTableName,'` ( entity_id INT UNSIGNED NOT NULL, id INT UNSIGNED NOT NULL, domain INT UNSIGNED NOT NULL, CONSTRAINT `',newTableName,'PK` PRIMARY KEY (entity_id, id, 
domain)) ' ); + + PREPARE createSubPropertyListTable FROM @createSubPropertyListTableStr; + EXECUTE createSubPropertyListTable; + DEALLOCATE PREPARE createSubPropertyListTable; + + SET @subResultSetStmtStr = CONCAT('INSERT IGNORE INTO `', newTableName, '` (domain, entity_id, id) + SELECT data1.domain_id as domain, data1.entity_id as entity_id, data1.value as id + FROM reference_data as data1 JOIN reference_data as data2 + ON (data1.domain_id=0 + AND data1.domain_id=data2.domain_id + AND data2.entity_id=data1.entity_id + AND ( + (data1.property_id=data2.value AND data2.status="REPLACEMENT") + OR + (data1.property_id!=data2.value AND data2.status!="REPLACEMENT" AND data1.status!="REPLACEMENT" AND data1.property_id=data2.property_id) + ) + AND EXISTS (SELECT 1 FROM `', sourceSet, '` as source WHERE source.id=data1.entity_id LIMIT 1)', + IF(propertiesTable IS NULL, '', CONCAT(' AND EXISTS (SELECT 1 FROM `', propertiesTable, '` as props WHERE props.id=data2.property_id LIMIT 1)')), + IF(refIdsTable IS NULL, '', CONCAT(' AND EXISTS (SELECT 1 FROM `', refIdsTable, '` as refs WHERE refs.id=data1.value LIMIT 1)')), + ')' + ); + + + PREPARE subResultSetStmt FROM @subResultSetStmtStr; + EXECUTE subResultSetStmt; + DEALLOCATE PREPARE subResultSetStmt; + + SELECT newTableName as list; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `insertEntity` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = 
utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `insertEntity`(in EntityID VARCHAR(255), in EntityName VARCHAR(255), in EntityDesc TEXT, in EntityRole VARCHAR(255), in ACL VARBINARY(65525)) +BEGIN + DECLARE NewACLID INT UNSIGNED DEFAULT NULL; + DECLARE Hash VARBINARY(255) DEFAULT NULL; + DECLARE Version VARBINARY(255) DEFAULT NULL; + DECLARE Transaction VARBINARY(255) DEFAULT NULL; + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + + + + call entityACL(NewACLID, ACL); + + + INSERT INTO entities (description, role, acl) + VALUES (EntityDesc, EntityRole, NewACLID); + + + SET InternalEntityID = LAST_INSERT_ID(); + + INSERT INTO entity_ids (internal_id, id) VALUES (InternalEntityID, EntityID); + + IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN + + SET Transaction = @SRID; + SET Version = SHA1(UUID()); + CALL insert_single_child_version(InternalEntityID, Hash, Version, Null, Transaction); + END IF; + + + + IF EntityName IS NOT NULL THEN + INSERT INTO name_data + (domain_id, entity_id, property_id, value, status, pidx) + VALUES (0, InternalEntityID, 20, EntityName, "FIX", 0); + END IF; + + SELECT Version as Version; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `insertEntityCollection` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 
SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `insertEntityCollection`(in PropertyID VARCHAR(255), in Collection VARCHAR(255)) +BEGIN + DECLARE InternalPropertyID INT UNSIGNED DEFAULT NULL; + + SELECT internal_id INTO InternalPropertyID FROM entity_ids WHERE id=PropertyID; + + INSERT INTO collection_type (domain_id, entity_id, property_id, collection) SELECT 0, 0, InternalPropertyID, Collection; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `insertEntityDataType` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `insertEntityDataType`(in PropertyID VARCHAR(255), in DataTypeID VARCHAR(255)) +BEGIN + DECLARE InternalPropertyID INT UNSIGNED DEFAULT NULL; + + SELECT internal_id INTO InternalPropertyID FROM entity_ids WHERE id=PropertyID; + + INSERT INTO data_type (domain_id, entity_id, property_id, datatype) SELECT 0, 0, InternalPropertyID, ( SELECT internal_id FROM entity_ids WHERE id = DataTypeID); + + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = 
@saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `insertEntityProperty` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `insertEntityProperty`( + in DomainID VARCHAR(255), + in EntityID VARCHAR(255), + in PropertyID VARCHAR(255), + in Datatable VARCHAR(255), + in PropertyValue TEXT, + in PropertyUnitSig BIGINT, + in PropertyStatus VARCHAR(255), + in NameOverride VARCHAR(255), + in DescOverride TEXT, + in DatatypeOverride VARCHAR(255), + in Collection VARCHAR(255), + in PropertyIndex INT UNSIGNED) +BEGIN + DECLARE ReferenceValueIVersion INT UNSIGNED DEFAULT NULL; + DECLARE ReferenceValue INT UNSIGNED DEFAULT NULL; + DECLARE AT_PRESENT INTEGER DEFAULT NULL; + DECLARE InternalDataTypeID INT UNSIGNED DEFAULT NULL; + DECLARE InternalPropertyID INT UNSIGNED DEFAULT NULL; + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + DECLARE InternalDomainID INT UNSIGNED DEFAULT 0; + + SELECT internal_id INTO InternalDomainID FROM entity_ids WHERE id = DomainID; + + + + IF LOCATE("$", EntityID) = 1 THEN + SET InternalEntityID=SUBSTRING(EntityID, 2); + ELSE + SELECT internal_id INTO InternalEntityID FROM entity_ids WHERE id = EntityID; + END IF; + IF LOCATE("$", PropertyID) = 1 THEN + SET InternalPropertyID=SUBSTRING(PropertyID, 2); + ELSE + SELECT internal_id INTO InternalPropertyID FROM entity_ids WHERE id = PropertyID; + END IF; + + CASE Datatable + WHEN 'double_data' THEN + INSERT INTO double_data + (domain_id, entity_id, property_id, value, unit_sig, 
status, pidx) + VALUES + (InternalDomainID, InternalEntityID, InternalPropertyID, PropertyValue, PropertyUnitSig, PropertyStatus, PropertyIndex); + WHEN 'integer_data' THEN + INSERT INTO integer_data + (domain_id, entity_id, property_id, value, unit_sig, status, pidx) + VALUES + (InternalDomainID, InternalEntityID, InternalPropertyID, PropertyValue, PropertyUnitSig, PropertyStatus, PropertyIndex); + WHEN 'datetime_data' THEN + INSERT INTO datetime_data + (domain_id, entity_id, property_id, value, value_ns, status, pidx) + VALUES + (InternalDomainID, InternalEntityID, InternalPropertyID, SUBSTRING_INDEX(PropertyValue, 'UTC', 1), IF(SUBSTRING_INDEX(PropertyValue, 'UTC', -1)='',NULL,SUBSTRING_INDEX(PropertyValue, 'UTC', -1)), PropertyStatus, PropertyIndex); + WHEN 'reference_data' THEN + + + SET AT_PRESENT=LOCATE("@", PropertyValue); + IF is_feature_config("ENTITY_VERSIONING", "ENABLED") AND AT_PRESENT > 0 THEN + SELECT internal_id INTO ReferenceValue FROM entity_ids WHERE id = SUBSTRING_INDEX(PropertyValue, '@', 1); + SET ReferenceValueIVersion = get_iversion(ReferenceValue, + SUBSTRING_INDEX(PropertyValue, '@', -1)); + IF ReferenceValueIVersion IS NULL THEN + + SELECT 0 from `ReferenceValueIVersion_WAS_NULL`; + END IF; + + ELSEIF LOCATE("$", PropertyValue) = 1 THEN + SET ReferenceValue = SUBSTRING(PropertyValue, 2); + ELSE + SELECT internal_id INTO ReferenceValue FROM entity_ids WHERE id = PropertyValue; + END IF; + + + INSERT INTO reference_data + (domain_id, entity_id, property_id, value, value_iversion, status, + pidx) + VALUES + (InternalDomainID, InternalEntityID, InternalPropertyID, ReferenceValue, + ReferenceValueIVersion, PropertyStatus, PropertyIndex); + WHEN 'enum_data' THEN + INSERT INTO enum_data + (domain_id, entity_id, property_id, value, status, pidx) + VALUES + (InternalDomainID, InternalEntityID, InternalPropertyID, PropertyValue, PropertyStatus, PropertyIndex); + WHEN 'date_data' THEN + INSERT INTO date_data + (domain_id, entity_id, property_id, 
value, status, pidx) + VALUES + (InternalDomainID, InternalEntityID, InternalPropertyID, SUBSTRING_INDEX(PropertyValue, '.', 1), PropertyStatus, PropertyIndex); + WHEN 'text_data' THEN + INSERT INTO text_data + (domain_id, entity_id, property_id, value, status, pidx) + VALUES + (InternalDomainID, InternalEntityID, InternalPropertyID, PropertyValue, PropertyStatus, PropertyIndex); + WHEN 'null_data' THEN + INSERT INTO null_data + (domain_id, entity_id, property_id, status, pidx) + VALUES + (InternalDomainID, InternalEntityID, InternalPropertyID, PropertyStatus, PropertyIndex); + WHEN 'name_data' THEN + INSERT INTO name_data + (domain_id, entity_id, property_id, value, status, pidx) + VALUES + (InternalDomainID, InternalEntityID, InternalPropertyID, PropertyValue, PropertyStatus, PropertyIndex); + + ELSE + + SELECT * FROM table_does_not_exist; + END CASE; + + IF DatatypeOverride IS NOT NULL THEN + SELECT internal_id INTO InternalDataTypeID from entity_ids WHERE id = DatatypeOverride; + call overrideType(InternalDomainID, InternalEntityID, InternalPropertyID, InternalDataTypeID); + IF Collection IS NOT NULL THEN + INSERT INTO collection_type (domain_id, entity_id, property_id, collection) VALUES (InternalDomainID, InternalEntityID, InternalPropertyID, Collection); + END IF; + END IF; + + IF NameOverride IS NOT NULL THEN + call overrideName(InternalDomainID, InternalEntityID, InternalPropertyID, NameOverride); + END IF; + + IF DescOverride IS NOT NULL THEN + call overrideDesc(InternalDomainID, InternalEntityID, InternalPropertyID, DescOverride); + END IF; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ 
; +/*!50003 DROP PROCEDURE IF EXISTS `insertIsa` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `insertIsa`(IN ChildID VARCHAR(255), IN ParentID VARCHAR(255)) +insert_is_a_proc: BEGIN + + DECLARE c INT UNSIGNED DEFAULT NULL; + DECLARE p INT UNSIGNED DEFAULT NULL; + + SELECT internal_id INTO c FROM entity_ids WHERE id = ChildID; + SELECT internal_id INTO p FROM entity_ids WHERE id = ParentID; + + INSERT INTO isa_cache (child, parent, rpath) VALUES (c, p, c); + + IF p = c THEN + + LEAVE insert_is_a_proc; + END IF; + + + + + + INSERT IGNORE INTO isa_cache SELECT + c + AS child, + i.parent + AS parent, + IF(p=i.rpath or i.rpath=parent, + p, + concat(p, ">", i.rpath)) + AS rpath + FROM isa_cache AS i WHERE i.child = p AND i.child != i.parent; + + + + INSERT IGNORE INTO isa_cache SELECT + l.child, + r.parent, + IF(l.rpath=l.child AND r.rpath=c, + c, + concat(IF(l.rpath=l.child, + c, + concat(l.rpath, '>', c)), + IF(r.rpath=c, + '', + concat('>', r.rpath)))) + AS rpath + FROM + isa_cache AS l INNER JOIN isa_cache AS r + ON (l.parent = c AND c = r.child AND l.child != l.parent); + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `insertLinCon` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET 
@saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `insertLinCon`(in signature_from BIGINT, in signature_to BIGINT, in a DECIMAL(65,30), in b_dividend BIGINT, in b_divisor BIGINT, in c DECIMAL(65,30)) +BEGIN + + INSERT IGNORE INTO units_lin_con (signature_from, signature_to, a, b_dividend, b_divisor, c) VALUES (signature_from, signature_to, a, b_dividend, b_divisor, c); + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `insert_single_child_version` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `insert_single_child_version`( + in InternalEntityID INT UNSIGNED, + in Hash VARBINARY(255), + in Version VARBINARY(255), + in Parent VARBINARY(255), + in Transaction VARBINARY(255)) +BEGIN + DECLARE newiversion INT UNSIGNED DEFAULT NULL; + DECLARE newipparent INT UNSIGNED DEFAULT NULL; + + + IF Parent IS NOT NULL THEN + SELECT e._iversion INTO newipparent + FROM entity_version AS e + WHERE e.entity_id = InternalEntityID + AND e.version = Parent; + IF newipparent IS 
NULL THEN + + SELECT concat("This parent does not exists: ", Parent) + FROM parent_version_does_not_exist; + END IF; + END IF; + + + + SELECT max(e._iversion)+1 INTO newiversion + FROM entity_version AS e + WHERE e.entity_id=InternalEntityID; + IF newiversion IS NULL THEN + SET newiversion = 1; + END IF; + + INSERT INTO entity_version + (entity_id, hash, version, _iversion, _ipparent, srid) + VALUES + (InternalEntityID, Hash, Version, newiversion, newipparent, Transaction); + + + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `intersectTable` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `intersectTable`(in resultSetTable VARCHAR(255), in diff VARCHAR(255)) +BEGIN + SET @diffStmtStr = CONCAT('DELETE FROM `', resultSetTable, '` WHERE id NOT IN ( SELECT id FROM `', diff,'`)'); + PREPARE diffStmt FROM @diffStmtStr; + EXECUTE diffStmt; + + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 
'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `isSubtype` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `isSubtype`(in ChildID VARCHAR(255), in ParentID VARCHAR(255)) +BEGIN + DECLARE c INT UNSIGNED DEFAULT NULL; + DECLARE p INT UNSIGNED DEFAULT NULL; + DECLARE ret BOOLEAN DEFAULT FALSE; + + SELECT internal_id INTO c from entity_ids WHERE id = ChildID; + SELECT internal_id INTO p from entity_ids WHERE id = ParentID; + + SELECT TRUE INTO ret FROM isa_cache AS i WHERE i.child=c AND i.parent=p LIMIT 1; + SELECT ret as ISA; +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `overrideDesc` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `overrideDesc`(in InternalDomainID INT UNSIGNED, in InternalEntityID INT UNSIGNED, in InternalPropertyID INT UNSIGNED, in Description TEXT) +BEGIN + INSERT INTO 
desc_overrides (domain_id, entity_id, property_id, description) VALUES (InternalDomainID, InternalEntityID, InternalPropertyID, Description); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `overrideName` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `overrideName`(in InternalDomainID INT UNSIGNED, in InternalEntityID INT UNSIGNED, in InternalPropertyID INT UNSIGNED, in Name VARCHAR(255)) +BEGIN + INSERT INTO name_overrides (domain_id, entity_id, property_id, name) VALUES (InternalDomainID, InternalEntityID, InternalPropertyID, Name); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `overrideType` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET 
character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `overrideType`(in InternalDomainID INT UNSIGNED, in InternalEntityID INT UNSIGNED, in InternalPropertyID INT UNSIGNED, in InternalDataTypeID INT UNSIGNED) +BEGIN + INSERT INTO data_type (domain_id, entity_id, property_id, datatype) VALUES (InternalDomainID, InternalEntityID, InternalPropertyID, InternalDataTypeID); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `raiseWarning` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `raiseWarning`(in str VARCHAR(20000)) +BEGIN + INSERT INTO warnings VALUES (str); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `registerReplacementIds` */; +/*!50003 SET @saved_cs_client = @@character_set_client 
*/ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `registerReplacementIds`(in amount INT UNSIGNED) +BEGIN + DECLARE ED INTEGER DEFAULT NULL; + + SELECT COUNT(id) INTO ED FROM entities WHERE Role='_REPLACEMENT' AND id!=0; + + WHILE ED < amount DO + INSERT INTO entities (description, role, acl) VALUES + (NULL, '_REPLACEMENT', 0); + + SET ED = ED + 1; + END WHILE; + + SELECT CONCAT("$", e.id) as ReplacementID FROM entities AS e WHERE e.Role='_REPLACEMENT' and e.id!=0; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `registerTempTableName` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `registerTempTableName`(out newTableName VARCHAR(255)) +BEGIN + SET newTableName = md5(CONCAT(RAND(),CURRENT_TIMESTAMP())); + SET @tempTableList = IF(@tempTableList IS NULL, + CONCAT('`',newTableName,'`'), + CONCAT(@tempTableList, ',`', newTableName, '`') + ); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET 
character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `retrieveEntity` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `retrieveEntity`( + in EntityID VARCHAR(255), + in Version VARBINARY(255)) +retrieveEntityBody: BEGIN + DECLARE FilePath VARCHAR(255) DEFAULT NULL; + DECLARE FileSize VARCHAR(255) DEFAULT NULL; + DECLARE FileHash VARCHAR(255) DEFAULT NULL; + DECLARE DatatypeID VARCHAR(255) DEFAULT NULL; + DECLARE InternalDatatypeID INT UNSIGNED DEFAULT NULL; + DECLARE CollectionName VARCHAR(255) DEFAULT NULL; + DECLARE IsHead BOOLEAN DEFAULT TRUE; + DECLARE IVersion INT UNSIGNED DEFAULT NULL; + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + + SELECT internal_id INTO InternalEntityID from entity_ids WHERE id = EntityID; + + IF InternalEntityID IS NULL THEN + + SELECT 0 FROM entities WHERE 0 = 1; + LEAVE retrieveEntityBody; + END IF; + + IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN + + IF Version IS NULL OR UPPER(Version) = "HEAD" THEN + SET Version = get_head_version(EntityID); + ELSEIF UPPER(LEFT(Version, 5)) = "HEAD~" THEN + SET IsHead = FALSE; + SET Version = get_head_relative(EntityID, SUBSTR(Version, 6)); + ELSE + SELECT get_head_version(EntityID) = Version INTO IsHead; + END IF; + + IF IsHead IS FALSE THEN + SET IVersion=get_iversion(InternalEntityID, Version); + + IF IVersion IS 
NULL THEN + + SELECT 0 FROM entities WHERE 0 = 1; + LEAVE retrieveEntityBody; + END IF; + + SELECT path, size, HEX(hash) + INTO FilePath, FileSize, FileHash + FROM archive_files + WHERE file_id = InternalEntityID + AND _iversion = IVersion + LIMIT 1; + + SELECT datatype + INTO InternalDatatypeID + FROM archive_data_type + WHERE domain_id = 0 + AND entity_id = 0 + AND property_id = InternalEntityID + AND _iversion = IVersion + LIMIT 1; + + SELECT collection + INTO CollectionName + FROM archive_collection_type + WHERE domain_id = 0 + AND entity_id = 0 + AND property_id = InternalEntityID + AND _iversion = IVersion + LIMIT 1; + + + SELECT + ( SELECT value FROM + ( SELECT value FROM name_data + WHERE domain_id = 0 + AND entity_id = InternalDatatypeID + AND property_id = 20 + ) AS tmp LIMIT 1 ) AS DatatypeName, + ( SELECT id FROM entity_ids WHERE internal_id=InternalDatatypeID ) AS DatatypeID, + CollectionName AS Collection, + EntityID AS EntityID, + ( SELECT value FROM archive_name_data + WHERE domain_id = 0 + AND entity_ID = InternalEntityID + AND property_id = 20 + AND _iversion = IVersion + ) AS EntityName, + e.description AS EntityDesc, + e.role AS EntityRole, + FileSize AS FileSize, + FilePath AS FilePath, + FileHash AS FileHash, + (SELECT acl FROM entity_acl AS a WHERE a.id = e.acl) AS ACL, + Version AS Version + FROM archive_entities AS e + WHERE e.id = InternalEntityID + AND e._iversion = IVersion + LIMIT 1; + + + LEAVE retrieveEntityBody; + + END IF; + END IF; + + SELECT path, size, hex(hash) + INTO FilePath, FileSize, FileHash + FROM files + WHERE file_id = InternalEntityID + LIMIT 1; + + SELECT dt.datatype INTO InternalDatatypeID + FROM data_type as dt + WHERE dt.domain_id=0 + AND dt.entity_id=0 + AND dt.property_id=InternalEntityID + LIMIT 1; + + SELECT collection INTO CollectionName + FROM collection_type + WHERE domain_id=0 + AND entity_id=0 + AND property_id=InternalEntityID + LIMIT 1; + + SELECT + ( SELECT value FROM name_data + WHERE domain_id = 0 + 
AND entity_id = InternalDatatypeID + AND property_id = 20 LIMIT 1 ) AS DatatypeName, + ( SELECT id FROM entity_ids WHERE internal_id=InternalDatatypeID ) AS DatatypeID, + CollectionName AS Collection, + EntityID AS EntityID, + ( SELECT value FROM name_data + WHERE domain_id = 0 + AND entity_ID = InternalEntityID + AND property_id = 20 LIMIT 1) AS EntityName, + e.description AS EntityDesc, + e.role AS EntityRole, + FileSize AS FileSize, + FilePath AS FilePath, + FileHash AS FileHash, + (SELECT acl FROM entity_acl AS a WHERE a.id = e.acl) AS ACL, + Version AS Version + FROM entities e WHERE id = InternalEntityID LIMIT 1; +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `retrieveEntityParents` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `retrieveEntityParents`( + in EntityID VARCHAR(255), + in Version VARBINARY(255)) +retrieveEntityParentsBody: BEGIN + + DECLARE IVersion INT UNSIGNED DEFAULT NULL; + DECLARE IsHead BOOLEAN DEFAULT TRUE; + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + + SELECT internal_id INTO InternalEntityID from entity_ids WHERE id = EntityID; + + IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN + IF Version IS NOT NULL THEN + SELECT get_head_version(EntityID) = Version INTO IsHead; + 
END IF; + + IF IsHead IS FALSE THEN + SELECT e._iversion INTO IVersion + FROM entity_version as e + WHERE e.entity_id = InternalEntityID + AND e.version = Version; + + IF IVersion IS NULL THEN + + LEAVE retrieveEntityParentsBody; + END IF; + + SELECT + ( SELECT id FROM entity_ids WHERE internal_id = i.parent) AS ParentID, + ( SELECT value FROM name_data + WHERE domain_id = 0 + AND entity_id = i.parent + AND property_id = 20 + ) AS ParentName, + + + + + e.description AS ParentDescription, + e.role AS ParentRole, + (SELECT acl FROM entity_acl AS a WHERE a.id = e.acl) AS ACL + FROM archive_isa AS i JOIN entities AS e + ON (i.parent = e.id) + WHERE i.child = InternalEntityID + AND i.child_iversion = IVersion + AND i.direct IS TRUE + ; + + LEAVE retrieveEntityParentsBody; + END IF; + END IF; + + SELECT + ( SELECT id FROM entity_ids WHERE internal_id = i.parent) AS ParentID, + ( SELECT value FROM name_data + WHERE domain_id = 0 + AND entity_id = i.parent + AND property_id = 20 ) AS ParentName, + e.description AS ParentDescription, + e.role AS ParentRole, + (SELECT acl FROM entity_acl AS a WHERE a.id = e.acl) AS ACL + FROM isa_cache AS i JOIN entities AS e + ON (i.parent = e.id) + WHERE i.child = InternalEntityID + AND i.rpath = InternalEntityID; +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `retrieveEntityProperties` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET 
character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `retrieveEntityProperties`( + in DomainID VARCHAR(255), + in EntityID VARCHAR(255), + in Version VARBINARY(255)) +retrieveEntityPropertiesBody: BEGIN + + DECLARE IVersion INT UNSIGNED DEFAULT NULL; + DECLARE IsHead BOOLEAN DEFAULT TRUE; + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + DECLARE InternalDomainID INT UNSIGNED DEFAULT 0; + + + + + IF LOCATE("$", EntityID) = 1 THEN + SET InternalEntityID=SUBSTRING(EntityID, 2); + ELSE + SELECT internal_id INTO InternalEntityID FROM entity_ids WHERE id = EntityID; + END IF; + + SELECT internal_id INTO InternalDomainID from entity_ids WHERE id = DomainID; + + IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN + IF Version IS NOT NULL THEN + IF InternalDomainID = 0 THEN + SELECT get_head_version(EntityID) = Version INTO IsHead; + ELSE + SELECT get_head_version(DomainID) = Version INTO IsHead; + END IF; + END IF; + + IF IsHead IS FALSE THEN + SELECT e._iversion INTO IVersion + FROM entity_version as e + WHERE ((e.entity_id = InternalEntityID AND InternalDomainID = 0) + OR (e.entity_id = InternalDomainID)) + AND e.version = Version; + + IF IVersion IS NULL THEN + + LEAVE retrieveEntityPropertiesBody; + END IF; + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + value AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM archive_double_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + AND _iversion = IVersion + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + value AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM archive_integer_data + WHERE domain_id = InternalDomainID + AND entity_id = 
InternalEntityID + AND _iversion = IVersion + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + CONCAT(value, '.NULL.NULL') AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM archive_date_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + AND _iversion = IVersion + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + CONCAT(value, 'UTC', IF(value_ns IS NULL, '', value_ns)) + AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM archive_datetime_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + AND _iversion = IVersion + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + value AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM archive_text_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + AND _iversion = IVersion + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + value AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM archive_enum_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + AND _iversion = IVersion + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + IF(value_iversion IS NULL, + IF(status = "REPLACEMENT", + CONCAT("$", value), + ( SELECT id FROM entity_ids WHERE internal_id = value )), + + CONCAT( + ( SELECT id FROM entity_ids WHERE internal_id = value ), + "@", _get_version(value, value_iversion))) + AS PropertyValue, + status AS 
PropertyStatus, + pidx AS PropertyIndex + FROM archive_reference_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + AND _iversion = IVersion + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + NULL AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM archive_null_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + AND _iversion = IVersion + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + value AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM archive_name_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + AND property_id != 20 + AND _iversion = IVersion; + + LEAVE retrieveEntityPropertiesBody; + END IF; + END IF; + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + value AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM double_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + value AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM integer_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + CONCAT(value, '.NULL.NULL') AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM date_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + + UNION ALL + + + SELECT + CONCAT("$", 
property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + CONCAT(value, 'UTC', IF(value_ns IS NULL, '', value_ns)) + AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM datetime_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + value AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM text_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + value AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM enum_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + IF(value_iversion IS NULL, + IF(status = "REPLACEMENT", + CONCAT("$", value), + ( SELECT id FROM entity_ids WHERE internal_id = value )), + + CONCAT( + ( SELECT id FROM entity_ids WHERE internal_id = value ), + "@", _get_version(value, value_iversion))) + AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM reference_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS PropertyID, + NULL AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM null_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + + UNION ALL + + + SELECT + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE 
internal_id = property_id ) AS PropertyID, + value AS PropertyValue, + status AS PropertyStatus, + pidx AS PropertyIndex + FROM name_data + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + AND property_id != 20; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `retrieveOverrides` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `retrieveOverrides`( + in DomainID VARCHAR(255), + in EntityID VARCHAR(255), + in Version VARBINARY(255)) +retrieveOverridesBody: BEGIN + + DECLARE IVersion INT UNSIGNED DEFAULT NULL; + DECLARE IsHead BOOLEAN DEFAULT TRUE; + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + DECLARE InternalDomainID INT UNSIGNED DEFAULT 0; + + + + + IF LOCATE("$", EntityID) = 1 THEN + SET InternalEntityID=SUBSTRING(EntityID, 2); + ELSE + SELECT internal_id INTO InternalEntityID FROM entity_ids WHERE id = EntityID; + END IF; + + SELECT internal_id INTO InternalDomainID from entity_ids WHERE id = DomainID; + + IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN + IF Version IS NOT NULL THEN + IF InternalDomainID = 0 THEN + SELECT get_head_version(EntityID) = Version INTO IsHead; + ELSE + SELECT get_head_version(DomainID) = Version INTO IsHead; + END IF; + END IF; + + IF IsHead IS FALSE 
THEN + SELECT e._iversion INTO IVersion + FROM entity_version as e + WHERE ((e.entity_id = InternalEntityID AND InternalDomainID = 0) + OR (e.entity_id = InternalDomainID)) + AND e.version = Version; + + IF IVersion IS NULL THEN + + LEAVE retrieveOverridesBody; + END IF; + + + SELECT + NULL AS collection_override, + name AS name_override, + NULL AS desc_override, + NULL AS type_name_override, + NULL AS type_id_override, + EntityID AS entity_id, + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS property_id + FROM archive_name_overrides + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + AND _iversion = IVersion + + UNION ALL + + + SELECT + NULL AS collection_override, + NULL AS name_override, + description AS desc_override, + NULL AS type_name_override, + NULL AS type_id_override, + EntityID AS entity_id, + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS property_id + FROM archive_desc_overrides + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + AND _iversion = IVersion + + UNION ALL + + + SELECT + NULL AS collection_override, + NULL AS name_override, + NULL AS desc_override, + (SELECT value FROM name_data + WHERE domain_id = 0 + AND entity_id = datatype + AND property_id = 20 + LIMIT 1) AS type_name_override, + (SELECT id FROM entity_ids WHERE internal_id = datatype) AS type_id_override, + EntityID AS entity_id, + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS property_id + FROM archive_data_type + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + AND _iversion = IVersion + + UNION ALL + + + SELECT + collection AS collection_override, + NULL AS name_override, + NULL AS desc_override, + NULL AS type_name_override, + NULL AS type_id_override, + EntityID AS entity_id, + CONCAT("$", property_id) AS 
InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS property_id + FROM archive_collection_type + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + AND _iversion = IVersion; + + LEAVE retrieveOverridesBody; + END IF; + END IF; + + SELECT + NULL AS collection_override, + name AS name_override, + NULL AS desc_override, + NULL AS type_name_override, + NULL AS type_id_override, + EntityID AS entity_id, + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS property_id + FROM name_overrides + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + + UNION ALL + + SELECT + NULL AS collection_override, + NULL AS name_override, + description AS desc_override, + NULL AS type_name_override, + NULL AS type_id_override, + EntityID AS entity_id, + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS property_id + FROM desc_overrides + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + + UNION ALL + + SELECT + NULL AS collection_override, + NULL AS name_override, + NULL AS desc_override, + (SELECT value FROM name_data + WHERE domain_id = 0 + AND entity_ID = datatype + AND property_id = 20 LIMIT 1) AS type_name_override, + (SELECT id FROM entity_ids WHERE internal_id = datatype) AS type_id_override, + EntityID AS entity_id, + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS property_id + FROM data_type + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID + + UNION ALL + + SELECT + collection AS collection_override, + NULL AS name_override, + NULL AS desc_override, + NULL AS type_name_override, + NULL AS type_id_override, + EntityID AS entity_id, + CONCAT("$", property_id) AS InternalPropertyID, + ( SELECT id FROM entity_ids WHERE internal_id = property_id ) AS property_id + FROM 
collection_type + WHERE domain_id = InternalDomainID + AND entity_id = InternalEntityID; + + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `retrieveQueryTemplateDef` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `retrieveQueryTemplateDef`( + in EntityID VARCHAR(255), + in Version VARBINARY(255)) +retrieveQueryTemplateDefBody: BEGIN + + DECLARE IVersion INT UNSIGNED DEFAULT NULL; + DECLARE IsHead BOOLEAN DEFAULT TRUE; + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + + SELECT internal_id INTO InternalEntityID FROM entity_ids WHERE id = EntityID; + + + IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN + + IF Version IS NOT NULL THEN + SELECT get_head_version(EntityID) = Version INTO IsHead; + END IF; + + IF IsHead IS FALSE THEN + SET IVersion = get_iversion(InternalEntityID, Version); + + IF IVersion IS NULL THEN + + LEAVE retrieveQueryTemplateDefBody; + END IF; + + SELECT definition + FROM archive_query_template_def + WHERE id = InternalEntityID + AND _iversion = IVersion; + + LEAVE retrieveQueryTemplateDefBody; + END IF; + END IF; + + SELECT definition + FROM query_template_def + WHERE id = InternalEntityID; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET 
character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `setFileProperties` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `setFileProperties`( + in EntityID VARCHAR(255), + in FilePath TEXT, + in FileSize BIGINT UNSIGNED, + in FileHash VARCHAR(255) +) +BEGIN + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + DECLARE IVersion INT UNSIGNED DEFAULT NULL; + + SELECT internal_id INTO InternalEntityID FROM entity_ids WHERE id = EntityID; + + IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN + SELECT max(e._iversion) INTO IVersion + FROM entity_version AS e + WHERE e.entity_id = InternalEntityID; + + INSERT INTO archive_files (file_id, path, size, hash, + _iversion) + SELECT file_id, path, size, hash, IVersion AS _iversion + FROM files + WHERE file_id = InternalEntityID; + END IF; + + DELETE FROM files WHERE file_id = InternalEntityID; + + IF FilePath IS NOT NULL THEN + INSERT INTO files (file_id, path, size, hash) + VALUES (InternalEntityID, FilePath, FileSize, unhex(FileHash)); + END IF; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET 
sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `set_transaction` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `set_transaction`( + srid VARBINARY(255), + username VARCHAR(255), + realm VARCHAR(255), + seconds BIGINT UNSIGNED, + nanos INT(10) UNSIGNED) +BEGIN + + SET @SRID = srid; + INSERT INTO transactions (srid, username, realm, seconds, nanos) + VALUES (srid, username, realm, seconds, nanos); +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `showEntityAutoIncr` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `showEntityAutoIncr`() +BEGIN +SELECT `AUTO_INCREMENT` +FROM INFORMATION_SCHEMA.TABLES +WHERE TABLE_SCHEMA = 'caosdb' +AND TABLE_NAME = 'entities'; + + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; 
+/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `updateEntity` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `updateEntity`( + in EntityID VARCHAR(255), + in EntityName VARCHAR(255), + in EntityDescription TEXT, + in EntityRole VARCHAR(255), + in DatatypeID VARCHAR(255), + in Collection VARCHAR(255), + in ACL VARBINARY(65525)) +BEGIN + DECLARE ACLID INT UNSIGNED DEFAULT NULL; + DECLARE Hash VARBINARY(255) DEFAULT NULL; + DECLARE Version VARBINARY(255) DEFAULT SHA1(UUID()); + DECLARE ParentVersion VARBINARY(255) DEFAULT NULL; + DECLARE Transaction VARBINARY(255) DEFAULT NULL; + DECLARE OldIVersion INT UNSIGNED DEFAULT NULL; + DECLARE InternalEntityID INT UNSIGNED DEFAULT NULL; + + SELECT internal_id INTO InternalEntityID from entity_ids WHERE id = EntityID; + + call entityACL(ACLID, ACL); + + IF is_feature_config("ENTITY_VERSIONING", "ENABLED") THEN + SELECT max(_iversion) INTO OldIVersion + FROM entity_version + WHERE entity_id = InternalEntityID; + + + INSERT INTO archive_entities (id, description, role, + acl, _iversion) + SELECT e.id, e.description, e.role, e.acl, OldIVersion + FROM entities AS e + WHERE e.id = InternalEntityID; + + INSERT INTO archive_data_type (domain_id, entity_id, property_id, + datatype, _iversion) + SELECT e.domain_id, e.entity_id, e.property_id, e.datatype, + OldIVersion + FROM data_type AS e + WHERE e.domain_id = 0 
+ AND e.entity_id = 0 + AND e.property_id = InternalEntityID; + + INSERT INTO archive_collection_type (domain_id, entity_id, property_id, + collection, _iversion) + SELECT e.domain_id, e.entity_id, e.property_id, e.collection, + OldIVersion + FROM collection_type as e + WHERE e.domain_id = 0 + AND e.entity_id = 0 + AND e.property_id = InternalEntityID; + + + SET Transaction = @SRID; + SELECT e.version INTO ParentVersion + FROM entity_version as e + WHERE e.entity_id = InternalEntityID + AND e._iversion = OldIVersion; + CALL insert_single_child_version( + InternalEntityID, Hash, Version, + ParentVersion, Transaction); + END IF; + + UPDATE entities e + SET e.description = EntityDescription, + e.role=EntityRole, + e.acl = ACLID + WHERE e.id = InternalEntityID; + + + + DELETE FROM name_data + WHERE domain_id = 0 AND entity_id = InternalEntityID AND property_id = 20; + IF EntityName IS NOT NULL THEN + INSERT INTO name_data + (domain_id, entity_id, property_id, value, status, pidx) + VALUES (0, InternalEntityID, 20, EntityName, "FIX", 0); + END IF; + + DELETE FROM data_type + WHERE domain_id=0 AND entity_id=0 AND property_id=InternalEntityID; + + DELETE FROM collection_type + WHERE domain_id=0 AND entity_id=0 AND property_id=InternalEntityID; + + IF DatatypeID IS NOT NULL THEN + INSERT INTO data_type (domain_id, entity_id, property_id, datatype) + SELECT 0, 0, InternalEntityID, + ( SELECT internal_id FROM entity_ids WHERE id = DatatypeID ); + + IF Collection IS NOT NULL THEN + INSERT INTO collection_type (domain_id, entity_id, property_id, + collection) + SELECT 0, 0, InternalEntityID, Collection; + END IF; + END IF; + + Select Version as Version; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!50003 SET @saved_sql_mode = @@sql_mode */ ; +/*!50003 SET sql_mode = 
'STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION' */ ; +/*!50003 DROP PROCEDURE IF EXISTS `updateLinCon` */; +/*!50003 SET @saved_cs_client = @@character_set_client */ ; +/*!50003 SET @saved_cs_results = @@character_set_results */ ; +/*!50003 SET @saved_col_connection = @@collation_connection */ ; +/*!50003 SET character_set_client = utf8 */ ; +/*!50003 SET character_set_results = utf8 */ ; +/*!50003 SET collation_connection = utf8_general_ci */ ; +DELIMITER ;; +CREATE DEFINER=`root`@`%` PROCEDURE `updateLinCon`(in sig_from BIGINT, in sig_to BIGINT, in new_a DECIMAL(65,30), in new_b_dividend BIGINT, in new_b_divisor BIGINT, in new_c DECIMAL(65,30)) +BEGIN + UPDATE units_lin_con SET signature_to=sig_to, a=new_a, b_dividend=new_b_dividend, b_divisor=new_b_divisor, c=new_c where signature_from=sig_from; + +END ;; +DELIMITER ; +/*!50003 SET sql_mode = @saved_sql_mode */ ; +/*!50003 SET character_set_client = @saved_cs_client */ ; +/*!50003 SET character_set_results = @saved_cs_results */ ; +/*!50003 SET collation_connection = @saved_col_connection */ ; +/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; + +/*!40101 SET SQL_MODE=@OLD_SQL_MODE */; +/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; +/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; +/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; +/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; +/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; +/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; + +-- Dump completed on 2024-10-02 9:54:57 diff --git a/integrationtests/test_profile/paths/extroot/README.md b/integrationtests/test_profile/paths/extroot/README.md new file mode 100644 index 00000000..ee741757 --- /dev/null +++ b/integrationtests/test_profile/paths/extroot/README.md @@ -0,0 +1,2 @@ +This directory is mounted into the LinkAhead docker container when the debug +profile is used, to allow the inclusion of external file systems. 
diff --git a/integrationtests/test_profile/profile.yaml b/integrationtests/test_profile/profile.yaml new file mode 100644 index 00000000..c8a734d6 --- /dev/null +++ b/integrationtests/test_profile/profile.yaml @@ -0,0 +1,29 @@ +default: + paths: + extroot: + "": "paths/extroot" + + refs: + # SERVER: dev + # PYLIB: dev + # MYSQLBACKEND: dev + # WEBUI: dev + # ADVANCEDUSERTOOLS: dev + + # General configuration options + conf: + restore: true + debug: true + timezone: "Cuba" + + network: + + server: + conf: + _CAOSDB_INTEGRATION_TEST_SUITE_KEY: "_CAOSDB_ADV_TEST_SUITE" + TRANSACTION_BENCHMARK_ENABLED: "TRUE" + # SERVER_SIDE_SCRIPTING_BIN_DIRS: "" + + # Development configuration options + # devel: + # jar: /var/build/caosdb-server/0123abcd/target/caosdb-server-<version>-jar-with-dependencies.jar -- GitLab From b3ec716bc5616cc8be576d31997f295257f449e5 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Tue, 15 Apr 2025 19:45:35 +0200 Subject: [PATCH 22/36] TEST: Actually running the integration test in the pipeline. --- integrationtests/test.sh | 3 +++ integrationtests/test_profile/profile.yaml | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/integrationtests/test.sh b/integrationtests/test.sh index 952a4dd2..4d611609 100755 --- a/integrationtests/test.sh +++ b/integrationtests/test.sh @@ -99,5 +99,8 @@ python3 -m pytest test_yaml_parser.py echo "Testing json-schema exporter" python3 -m pytest test_json_schema_exporter.py +echo "Testing XLSX export/import" +python3 -m pytest test_ex_import_xlsx.py + # Obsolete due to teardown in the above test. # echo "/n/n/n YOU NEED TO RESTART THE SERVER TO REDO TESTS!!!" 
diff --git a/integrationtests/test_profile/profile.yaml b/integrationtests/test_profile/profile.yaml index c8a734d6..82f95b11 100644 --- a/integrationtests/test_profile/profile.yaml +++ b/integrationtests/test_profile/profile.yaml @@ -1,7 +1,7 @@ default: paths: extroot: - "": "paths/extroot" + "": "../extroot" refs: # SERVER: dev -- GitLab From f0db1ea44988ec5f3cc6ef808af3c8787870c3f8 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Wed, 16 Apr 2025 08:32:57 +0200 Subject: [PATCH 23/36] TEST: Fixed problem with timezone information, added test. --- .docker/docker-compose.yml | 2 +- integrationtests/test_ex_import_xlsx.py | 10 +++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/.docker/docker-compose.yml b/.docker/docker-compose.yml index 798717ba..e8359204 100644 --- a/.docker/docker-compose.yml +++ b/.docker/docker-compose.yml @@ -28,7 +28,7 @@ services: environment: DEBUG: 1 CAOSDB_CONFIG__CAOSDB_INTEGRATION_TEST_SUITE_KEY: "_CAOSDB_ADV_TEST_SUITE" - + CAOSDB_CONFIG_TIMEZONE: "Cuba" networks: caosnet: diff --git a/integrationtests/test_ex_import_xlsx.py b/integrationtests/test_ex_import_xlsx.py index 61949b91..cca75f56 100755 --- a/integrationtests/test_ex_import_xlsx.py +++ b/integrationtests/test_ex_import_xlsx.py @@ -126,6 +126,11 @@ def _insert_multiple_refs_data(): cont.insert() + # Check datetime consistency + training_retrieved = db.execute_query("FIND Training", unique=True) + dt_value = datetime.fromisoformat(training_retrieved.get_property("date").value) + assert dt_value == datetime.fromisoformat(json_data["Training"][0]["date"]) + @pytest.fixture(autouse=True) def setup(clear_database): @@ -263,7 +268,10 @@ def test_export_list_refs(tmpdir): sheet_training = filled_generated["Training"] assert sheet_training.max_row == 7 assert sheet_training.max_column == 19 - assert sheet_training["D7"].value == datetime(2024, 3, 21, 15, 12) + date = sheet_training["D7"].value + assert date == 
datetime.fromisoformat("2024-03-21 14:12:00+00:00" + ).astimezone().replace(tzinfo=None) + sheet_top = filled_generated["Training.Organisation.Person"] assert sheet_top.max_row == 11 assert sheet_top.max_column == 7 -- GitLab From f27e770777c4adf9616cd97b83f3c4e54dd103b7 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Wed, 16 Apr 2025 09:17:50 +0200 Subject: [PATCH 24/36] DOCS: Added fixme to fill_xlsx --- integrationtests/test_ex_import_xlsx.py | 1 + src/caosadvancedtools/table_json_conversion/fill_xlsx.py | 1 + 2 files changed, 2 insertions(+) diff --git a/integrationtests/test_ex_import_xlsx.py b/integrationtests/test_ex_import_xlsx.py index cca75f56..409d3557 100755 --- a/integrationtests/test_ex_import_xlsx.py +++ b/integrationtests/test_ex_import_xlsx.py @@ -268,6 +268,7 @@ def test_export_list_refs(tmpdir): sheet_training = filled_generated["Training"] assert sheet_training.max_row == 7 assert sheet_training.max_column == 19 + # Use same transformation as fill_xlsx for datetime comparison. date = sheet_training["D7"].value assert date == datetime.fromisoformat("2024-03-21 14:12:00+00:00" ).astimezone().replace(tzinfo=None) diff --git a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py index 8c2abaf0..b47dd56f 100644 --- a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py @@ -187,6 +187,7 @@ Returns out: union[dict, None] If ``only_collect_insertables`` is True, return a dict (path string -> value) """ + # FIXME The `utc` parameter is neither used, tested nor propagated recursively. 
assert (current_path is None) is (context is None), ( "`current_path` and `context` must either both be given, or none of them.") if current_path is None: -- GitLab From 07c24c9b247416594bfc2a6b37a4ed726a34bdf9 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Tue, 22 Apr 2025 17:57:41 +0200 Subject: [PATCH 25/36] TEST: Test for lists of scalars. --- integrationtests/test_ex_import_xlsx.py | 69 +++++++++++++++++++++++++ 1 file changed, 69 insertions(+) diff --git a/integrationtests/test_ex_import_xlsx.py b/integrationtests/test_ex_import_xlsx.py index 409d3557..17a898a0 100755 --- a/integrationtests/test_ex_import_xlsx.py +++ b/integrationtests/test_ex_import_xlsx.py @@ -82,6 +82,56 @@ def _create_datamodel(modelfile: str): model.sync_data_model(noquestion=True) +def _insert_simple_data(): + """Insert the data from `simple_data.json`.""" + + # 1. Insert enums. + enums = db.Container() + for orgname in ["ECB", "IMF"]: + rec_org = db.Record(name=orgname).add_parent(db.RecordType("Organisation")) + enums.append(rec_org) + enums.insert() + + # 2. 
Insert data from JSON + json_data_file = rfp_unittest_data("simple_data.json") + with open(json_data_file, encoding="utf-8") as myfile: + json_data = json.load(myfile) + + training_data = json_data["Training"][0] + coaches = [] + for coach_data in training_data["coach"]: + rec_coach = db.Record().add_parent(db.RecordType("Person")) + for propname, value in coach_data.items(): + rec_coach.add_property(propname, value=value) + coaches.append(rec_coach) + rec_supervisor = db.Record().add_parent(db.RecordType("Person")) + for propname, value in training_data["supervisor"].items(): + rec_supervisor.add_property(propname, value=value) + persons = [] + for person_data in json_data["Person"]: + rec_person = db.Record().add_parent(db.RecordType("Person")) + for propname, value in person_data.items(): + rec_person.add_property(propname, value=value) + persons.append(rec_person) + rec_training = db.Record().add_parent(db.RecordType("Training")) + rec_training.add_property("date", datetime.fromisoformat(training_data["date"])) + rec_training.add_property("url", training_data["url"]) + rec_training.add_property("coach", coaches) + rec_training.add_property("supervisor", rec_supervisor) + rec_training.add_property("duration", training_data["duration"]) + rec_training.add_property("participants", training_data["participants"]) + rec_training.add_property("subjects", training_data["subjects"]) + rec_training.add_property("remote", training_data["remote"]) + + cont = db.Container() + cont.append(rec_training) + cont.append(rec_supervisor) + cont.extend(coaches) + cont.extend(persons) + + cont.insert() + + def _insert_multiple_refs_data(): """Insert the data from `multiple_refs_data`. 
""" @@ -203,6 +253,23 @@ def test_successful_export(): tmp_path.unlink() +def test_export_lists(tmpdir): + """Properties of datatype LIST<TEXT/INTEGER/...>.""" + _create_datamodel(rfp_unittest_data("simple_model.yml")) + _insert_simple_data() + + query_result = db.execute_query("Find Training") + export_import_xlsx.export_container_to_xlsx(records=query_result, + include_referenced_entities=True, + xlsx_data_filepath=tmpdir / "result.xlsx", + jsonschema_filepath=tmpdir / "schema.json", + jsondata_filepath=tmpdir / "data.json", + xlsx_template_filepath=tmpdir / "template.xlsx", + ) + from IPython import embed + embed() + + def test_export_list_refs(tmpdir): """Test the export to XLSX of list-valued references. @@ -279,3 +346,5 @@ def test_export_list_refs(tmpdir): assert sheet_top["B8"].value == sheet_training["B7"].value # Check foreign key component assert sheet_top["B8"].value == sheet_top["B11"].value assert sheet_top["G10"].value == "hermione@organisation.org.uk" + from IPython import embed + embed() -- GitLab From 9475350b536449a65226ec3d6ae33ec507417c6a Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Thu, 24 Apr 2025 16:30:28 +0200 Subject: [PATCH 26/36] WIP: Automatic generation of templates for list properties. 
--- .../table_json_conversion/table_generator.py | 4 +++- .../table_json_conversion/create_jsonschema.py | 3 ++- .../data/simple_model.yml | 9 +++++---- .../test_table_template_generator.py | 18 +++++++++++++++++- 4 files changed, 27 insertions(+), 7 deletions(-) diff --git a/src/caosadvancedtools/table_json_conversion/table_generator.py b/src/caosadvancedtools/table_json_conversion/table_generator.py index 7726d8ce..4a5fcd85 100644 --- a/src/caosadvancedtools/table_json_conversion/table_generator.py +++ b/src/caosadvancedtools/table_json_conversion/table_generator.py @@ -297,7 +297,9 @@ foreign_keys: list[list[str]] assert d['type'] == 'string' assert d['format'] == 'date' or d['format'] == 'date-time' return default_return - if schema["type"] in ['string', 'number', 'integer', 'boolean']: + scalars = ['string', 'number', 'integer', 'boolean'] + # Also add "null" combinations, such as ["string", "null"]. + if schema["type"] in (scalars + [[scal, "null"] for scal in scalars]): if 'format' in schema and schema['format'] == 'data-url': return {} # file; ignore for now return default_return diff --git a/unittests/table_json_conversion/create_jsonschema.py b/unittests/table_json_conversion/create_jsonschema.py index 8ab4ad2d..0a244425 100755 --- a/unittests/table_json_conversion/create_jsonschema.py +++ b/unittests/table_json_conversion/create_jsonschema.py @@ -30,8 +30,9 @@ from caosadvancedtools.models import parser # import tomli -def prepare_datamodel(modelfile, recordtypes: list[str], outfile: str, +def prepare_datamodel(modelfile: str, recordtypes: list[str], outfile: str, do_not_create: list[str] = None): + """Dump the schema generated from ``modelfile`` to ``outfile``.""" if do_not_create is None: do_not_create = [] model = parser.parse_model_from_yaml(modelfile) diff --git a/unittests/table_json_conversion/data/simple_model.yml b/unittests/table_json_conversion/data/simple_model.yml index 74fb5bc5..5dfb2aa2 100644 --- 
a/unittests/table_json_conversion/data/simple_model.yml +++ b/unittests/table_json_conversion/data/simple_model.yml @@ -30,7 +30,8 @@ Training: ProgrammingCourse: inherit_from_suggested: - Training -Organisation: - recommended_properties: - Country: - datatype: TEXT +## Unused as of now. +# Organisation: +# recommended_properties: +# Country: +# datatype: TEXT diff --git a/unittests/table_json_conversion/test_table_template_generator.py b/unittests/table_json_conversion/test_table_template_generator.py index d9a84dcf..9407d844 100644 --- a/unittests/table_json_conversion/test_table_template_generator.py +++ b/unittests/table_json_conversion/test_table_template_generator.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python3 # encoding: utf-8 # # This file is a part of the LinkAhead Project. @@ -29,6 +28,7 @@ from caosadvancedtools.table_json_conversion.xlsx_utils import ColumnType from openpyxl import load_workbook from .utils import compare_workbooks +from .create_jsonschema import prepare_datamodel def rfp(*pathcomponents): @@ -274,6 +274,22 @@ def test_model_with_multiple_choice(): outfile=None) +def test_schema_with_null_arrays(tmp_path): + """Schemas may be generated with allow `None` as list content.""" + + # Generate json automatically + prepare_datamodel(modelfile=rfp("data/simple_model.yml"), + recordtypes=["Training", "Person"], + outfile=tmp_path / "simple_schema.json") + + # Compare result + _compare_generated_to_known_good( + schema_file=tmp_path / "simple_schema.json", + known_good=rfp("data/simple_template.xlsx"), + foreign_keys={'Training': {"__this__": ['date', 'url']}}, + outfile=None) + + def test_exceptions(): # Foreign keys must be lists with pytest.raises(ValueError, match="Foreign keys must be a list of strings, but a single " -- GitLab From 5c2eddffce35f13545c1dd0f2201d32a7497f4ab Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Mon, 28 Apr 2025 12:59:20 +0200 Subject: [PATCH 27/36] WIP: Automatic list enum references. 
--- integrationtests/test_ex_import_xlsx.py | 92 ++++++++- src/caosadvancedtools/json_schema_exporter.py | 177 +++++++++--------- .../export_import_xlsx.py | 3 +- .../data/multiple_choice_schema.json | 6 +- unittests/table_json_conversion/utils.py | 22 ++- 5 files changed, 197 insertions(+), 103 deletions(-) diff --git a/integrationtests/test_ex_import_xlsx.py b/integrationtests/test_ex_import_xlsx.py index 17a898a0..a2190302 100755 --- a/integrationtests/test_ex_import_xlsx.py +++ b/integrationtests/test_ex_import_xlsx.py @@ -82,6 +82,39 @@ def _create_datamodel(modelfile: str): model.sync_data_model(noquestion=True) +def _insert_multiple_choice_data(): + """Insert the data from `multiple_choice_data.json`.""" + # 1. Insert enums. + enums = db.Container() + for skillname in ["Planning", "Communication", "Evaluation"]: + rec = db.Record(name=skillname).add_parent(db.RecordType("Skill")) + enums.append(rec) + for examname in ["Oral", "Written"]: + rec = db.Record(name=examname).add_parent(db.RecordType("ExamType")) + enums.append(rec) + enums.insert() + + # 2. 
Insert data from JSON + json_data_file = rfp_unittest_data("multiple_choice_data.json") + with open(json_data_file, encoding="utf-8") as myfile: + json_data = json.load(myfile) + + skills = [] + for skillname in ["Planning", "Evaluation"]: + skills.append(db.Record(skillname).retrieve()) + + records = db.Container() + training_data = json_data["Training"][0] + + rec_training = db.Record(name=training_data["name"]).add_parent(db.RecordType("Training")) + rec_training.add_property("date", datetime.fromisoformat(training_data["date"])) + rec_training.add_property("skills", skills) + rec_training.add_property("exam_types", []) + + records.append(rec_training) + records.insert() + + def _insert_simple_data(): """Insert the data from `simple_data.json`.""" @@ -255,10 +288,11 @@ def test_successful_export(): def test_export_lists(tmpdir): """Properties of datatype LIST<TEXT/INTEGER/...>.""" + tmpdir = Path(tmpdir) _create_datamodel(rfp_unittest_data("simple_model.yml")) _insert_simple_data() - query_result = db.execute_query("Find Training") + query_result = db.execute_query("FIND Training") export_import_xlsx.export_container_to_xlsx(records=query_result, include_referenced_entities=True, xlsx_data_filepath=tmpdir / "result.xlsx", @@ -270,6 +304,60 @@ def test_export_lists(tmpdir): embed() +def test_multiple_choice(tmpdir): + """List properties of enum references.""" + tmpdir = Path(tmpdir) + _create_datamodel(rfp_unittest_data("multiple_choice_model.yaml")) + _insert_multiple_choice_data() + + query_result = db.execute_query("FIND Training") + export_import_xlsx.export_container_to_xlsx(records=query_result, + include_referenced_entities=True, + xlsx_data_filepath=tmpdir / "result.xlsx", + jsonschema_filepath=tmpdir / "schema.json", + jsondata_filepath=tmpdir / "data.json", + xlsx_template_filepath=tmpdir / "template.xlsx", + ) + # Check: schema + with open(tmpdir/"schema.json", encoding="utf-8") as schema_f: + schema_generated = json.load(schema_f) + + try: + assert 
len(schema_generated["properties"]) == 1 # Only 'Training' should be top level + training = schema_generated["properties"]["Training"] + props = training["properties"] + assert len(props["skills"]["items"]["enum"]) == 3 + assert len(props["exam_types"]["items"]["enum"]) == 2 + except KeyError: + print("It seems the generated JSON schema does not have the expected structure!") + raise + + # Check: XLSX template + template_known_good = load_workbook(rfp_unittest_data("multiple_choice_id_template.xlsx")) + template_generated = load_workbook(tmpdir / "template.xlsx") + compare_workbooks(template_generated, template_known_good) + + # Check: Data json content + with open(rfp_unittest_data("multiple_choice_retrieved_data.json"), encoding="utf-8") as myfile: + json_known_good = json.load(myfile) + with open(tmpdir / "data.json", encoding="utf-8") as myfile: + json_generated = purge_from_json(json.load(myfile), remove_keys=["id"]) + assert_equal_jsons(json_generated, json_known_good) + + # Check: Filled XLSX + filled_generated = load_workbook(tmpdir / "result.xlsx") + # For the moment: just check a few samples + assert filled_generated.sheetnames == ['Training'] + sheet_training = filled_generated["Training"] + assert sheet_training.max_row == 7 + assert sheet_training.max_column == 9 + assert sheet_training["E7"].value == "x" + assert sheet_training["F7"].value is None + assert sheet_training["G7"].value == "x" + assert sheet_training["H7"].value is None + assert sheet_training["I7"].value is None + + def test_export_list_refs(tmpdir): """Test the export to XLSX of list-valued references. 
@@ -294,7 +382,7 @@ def test_export_list_refs(tmpdir): xlsx_template_filepath=tmpdir / "template.xlsx", ) - # Test schema + # Check: schema with open(tmpdir/"schema.json", encoding="utf-8") as schema_f: schema_generated = json.load(schema_f) diff --git a/src/caosadvancedtools/json_schema_exporter.py b/src/caosadvancedtools/json_schema_exporter.py index daa2ebeb..a994d3db 100644 --- a/src/caosadvancedtools/json_schema_exporter.py +++ b/src/caosadvancedtools/json_schema_exporter.py @@ -55,7 +55,7 @@ single schema. """ from collections import OrderedDict -from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union +from typing import Any, Iterable, Optional, Sequence, Union import linkahead as db from linkahead.cached import cache_clear, cached_query @@ -72,16 +72,16 @@ class JsonSchemaExporter: name_property_for_new_records: bool = False, use_id_for_identification: bool = False, description_property_for_new_records: bool = False, - additional_options_for_text_props: dict = None, - additional_json_schema: Dict[str, dict] = None, - additional_ui_schema: Dict[str, dict] = None, + additional_options_for_text_props: Optional[dict] = None, + additional_json_schema: Optional[dict[str, dict]] = None, + additional_ui_schema: Optional[dict[str, dict]] = None, units_in_description: bool = True, - plain_data_model: bool = False, - do_not_create: List[str] = None, - do_not_retrieve: List[str] = None, + do_not_create: Optional[list[str]] = None, + do_not_retrieve: Optional[Union[list[str], str]] = None, no_remote: bool = False, - use_rt_pool: DataModel = None, - multiple_choice: List[str] = None, + use_rt_pool: Optional[DataModel] = None, + multiple_choice: Optional[list[str]] = None, + multiple_choice_guess: bool = False, wrap_files_in_objects: bool = False, ): """Set up a JsonSchemaExporter, which can then be applied on RecordTypes. @@ -112,20 +112,17 @@ class JsonSchemaExporter: description of the corresponding schema entry. 
If set to false, an additional `unit` key is added to the schema itself which is purely annotational and ignored, e.g., in validation. Default is True. - plain_data_model: bool, optional - If True, represent references as plain objects, without the option to choose from an - enum list of existing entities. Exception: When the reference looks like it *should be* - an enum, the existing Record entries are given as options. This parameter should be set - to True when one needs a generic representation of the data model. - The default is ``False``. do_not_create : list[str], optional A list of reference Property names, for which there should be no option to create them. Instead, only the choice of existing elements should be given. - do_not_retrieve : list[str], optional + do_not_retrieve : list[str] or str, optional A list of RecordType names, for which no Records shall be retrieved. Instead, only an object description should be given. If this list overlaps with the `do_not_create` parameter, the behavior is undefined. + If this parameter is the string "``auto``", only multiple choice references (see + parameter ``multiple_choice``) will be retrieved. + The default is the empty list. no_remote : bool, optional If True, do not attempt to connect to a LinkAhead server at all. Default is False. Note that the exporter may fail if this option is activated and the data model is not @@ -137,6 +134,9 @@ class JsonSchemaExporter: A list of reference Property names which shall be denoted as multiple choice properties. This means that each option in this property may be selected at most once. This is not implemented yet if the Property is not in ``do_not_create`` as well. + multiple_choice_guess : bool, default=False + If True, try to guess for all reference Properties that are not in ``multiple_choice`` + if they are enum-like and thus should be handled as multiple choice. 
wrap_files_in_objects : bool, optional Whether (lists of) files should be wrapped into an array of objects that have a file property. The sole purpose of this wrapping is to @@ -144,6 +144,15 @@ class JsonSchemaExporter: bug<https://github.com/rjsf-team/react-jsonschema-form/issues/3957>`_ so only set this to True if you're using the exported schema with react-json-form and you are experiencing the bug. Default is False. + + Notes on reference properties + ----------------------------- + + List references will have the "uniqueItems" property set if: + + - ``do_not_retrieve`` is not set for the referenced RecordType + - ``multiple_choice`` is true or guessed to be true (if ``multiple_choice_guess`` is set) + """ if not additional_options_for_text_props: additional_options_for_text_props = {} @@ -168,15 +177,15 @@ class JsonSchemaExporter: self._additional_json_schema = additional_json_schema self._additional_ui_schema = additional_ui_schema self._units_in_description = units_in_description - self._plain_data_model = plain_data_model self._do_not_create = do_not_create self._do_not_retrieve = do_not_retrieve self._no_remote = no_remote self._use_rt_pool = use_rt_pool self._multiple_choice = multiple_choice + self._multiple_choice_guess = multiple_choice_guess self._wrap_files_in_objects = wrap_files_in_objects - @staticmethod + @ staticmethod def _make_required_list(rt: db.RecordType): """Return the list of names of properties with importance db.OBLIGATORY.""" required_list = [] @@ -188,7 +197,8 @@ class JsonSchemaExporter: return required_list - def _make_segment_from_prop(self, prop: db.Property) -> Tuple[OrderedDict, dict]: + def _make_segment_from_prop(self, prop: db.Property, multiple_choice_enforce: bool = False + ) -> tuple[OrderedDict, dict]: """Return the JSON Schema and ui schema segments for the given property. 
The result may either be a simple json schema segment, such as a `string @@ -202,6 +212,9 @@ Parameters prop : db.Property The property to be transformed. +multiple_choice_enforce : bool, default=False + If True, this property shall be handled as multiple choice items. + Returns ------- @@ -248,19 +261,29 @@ ui_schema : dict json_prop["type"] = "integer" elif prop.datatype == db.DOUBLE: json_prop["type"] = "number" + # list-valued non-files elif is_list_datatype(prop.datatype) and not ( - self._wrap_files_in_objects and get_list_datatype(prop.datatype, - strict=True) == db.FILE): + self._wrap_files_in_objects + and get_list_datatype(prop.datatype, strict=True) == db.FILE): json_prop["type"] = "array" list_element_prop = db.Property( name=prop.name, datatype=get_list_datatype(prop.datatype, strict=True)) - json_prop["items"], inner_ui_schema = self._make_segment_from_prop(list_element_prop) + + # Is this a multiple choice array? + multiple_choice = prop.name in self._multiple_choice + prop.name in self._do_not_create + if not multiple_choice and self._multiple_choice_guess: + multiple_choice = self._guess_recordtype_is_enum(list_element_prop.datatype) + + # Get inner content of list + json_prop["items"], inner_ui_schema = self._make_segment_from_prop( + list_element_prop, multiple_choice_enforce=multiple_choice) if "type" in json_prop["items"] and ( json_prop["items"]["type"] in ["boolean", "integer", "number", "string"] ): json_prop["items"]["type"] = [json_prop["items"]["type"], "null"] - if prop.name in self._multiple_choice and prop.name in self._do_not_create: + if multiple_choice: # TODO: if not multiple_choice, but do_not_create: # "ui:widget" = "radio" & "ui:inline" = true # TODO: set threshold for number of items. @@ -269,6 +292,7 @@ ui_schema : dict ui_schema["ui:inline"] = True if inner_ui_schema: ui_schema["items"] = inner_ui_schema + # scalar references elif prop.is_reference(): # We must distinguish between multiple kinds of "reference" properties. 
@@ -324,11 +348,15 @@ ui_schema : dict prop_name = prop.datatype if isinstance(prop.datatype, db.Entity): prop_name = prop.datatype.name - if prop.name in self._do_not_retrieve or self._plain_data_model: + if (not multiple_choice_enforce + and (self._do_not_retrieve == "auto" + or (isinstance(self._do_not_retrieve, list) + and prop_name in self._do_not_retrieve))): values = [] else: values = self._retrieve_enum_values(f"RECORD '{prop_name}'") - if prop.name in self._do_not_create: + + if multiple_choice_enforce or prop.name in self._do_not_create: # Only a simple list of values json_prop["enum"] = values else: @@ -384,7 +412,7 @@ ui_schema : dict return self._customize(json_prop, ui_schema, prop) - @staticmethod + @ staticmethod def _make_text_property(description="", text_format=None, text_pattern=None) -> OrderedDict: """Create a text element. @@ -431,6 +459,22 @@ ui_schema : dict return prop + def _guess_recordtype_is_enum(self, rt_name: str) -> bool: + """For a given RecordType, guess if it represents an enum. + + Parameters + ---------- + rt_name : str + Name of the RecordType to be guessed. + + Returns + ------- + out : guess + True, if the RecordType is guessed to be an enum. False otherwise. + """ + rt = db.RecordType(rt_name).retrieve() + return len(rt.get_properties()) == 0 + def _retrieve_enum_values(self, role: str): if self._no_remote: @@ -449,7 +493,7 @@ ui_schema : dict return vals - def _make_segment_from_recordtype(self, rt: db.RecordType) -> Tuple[OrderedDict, dict]: + def _make_segment_from_recordtype(self, rt: db.RecordType) -> tuple[OrderedDict, dict]: """Return Json schema and uischema segments for the given RecordType. The result is an element of type `object @@ -515,7 +559,7 @@ ui_schema : dict return schema, ui_schema def _customize(self, schema: OrderedDict, ui_schema: dict, entity: db.Entity = None) -> ( - Tuple[OrderedDict, dict]): + tuple[OrderedDict, dict]): """Generic customization method. 
Walk over the available customization stores and apply all applicable ones. No specific order is @@ -547,7 +591,7 @@ guaranteed (as of now). return schema, ui_schema def recordtype_to_json_schema(self, rt: db.RecordType, rjsf: bool = False) -> Union[ - dict, Tuple[dict, dict]]: + dict, tuple[dict, dict]]: """Create a jsonschema from a given RecordType that can be used, e.g., to validate a json specifying a record of the given type. @@ -589,17 +633,18 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T use_id_for_identification: bool = False, description_property_for_new_records: bool = False, additional_options_for_text_props: Optional[dict] = None, - additional_json_schema: Dict[str, dict] = None, - additional_ui_schema: Dict[str, dict] = None, + additional_json_schema: Optional[dict[str, dict]] = None, + additional_ui_schema: Optional[dict[str, dict]] = None, units_in_description: bool = True, - do_not_create: List[str] = None, - do_not_retrieve: List[str] = None, + do_not_create: Optional[list[str]] = None, + do_not_retrieve: Optional[Union[list[str], str]] = None, no_remote: bool = False, - use_rt_pool: DataModel = None, - multiple_choice: List[str] = None, + use_rt_pool: Optional[DataModel] = None, + multiple_choice: Optional[list[str]] = None, + multiple_choice_guess: bool = False, rjsf: bool = False, wrap_files_in_objects: bool = False - ) -> Union[dict, Tuple[dict, dict]]: + ) -> Union[dict, tuple[dict, dict]]: """Create a jsonschema from a given RecordType that can be used, e.g., to validate a json specifying a record of the given type. @@ -610,58 +655,8 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T ---------- rt : RecordType The RecordType from which a json schema will be created. - additional_properties : bool, optional - Whether additional properties will be admitted in the resulting - schema. Optional, default is True. 
- name_property_for_new_records : bool, optional - Whether objects shall generally have a `name` property in the generated schema. Optional, - default is False. - use_id_for_identification: bool, optional - If set to true, an 'id' property is added to all records, and foreign - key references are assumed to be ids. - description_property_for_new_records : bool, optional - Whether objects shall generally have a `description` property in the generated schema. - Optional, default is False. - additional_options_for_text_props : dict, optional - Dictionary containing additional "pattern" or "format" options for - string-typed properties. Optional, default is empty. - additional_json_schema : dict[str, dict], optional - Additional schema content for elements of the given names. - additional_ui_schema : dict[str, dict], optional - Additional ui schema content for elements of the given names. - units_in_description : bool, optional - Whether to add the unit of a LinkAhead property (if it has any) to the - description of the corresponding schema entry. If set to false, an - additional `unit` key is added to the schema itself which is purely - annotational and ignored, e.g., in validation. Default is True. - do_not_create : list[str], optional - A list of reference Property names, for which there should be no option - to create them. Instead, only the choice of existing elements should - be given. - do_not_retrieve : list[str], optional - A list of RedcordType names, for which no Records shall be retrieved. Instead, only an - object description should be given. If this list overlaps with the `do_not_create` - parameter, the behavior is undefined. - no_remote : bool, optional - If True, do not attempt to connect to a LinkAhead server at all. Default is False. - use_rt_pool : models.data_model.DataModel, optional - If given, do not attempt to retrieve RecordType information remotely but from this parameter - instead. 
- multiple_choice : list[str], optional - A list of reference Property names which shall be denoted as multiple choice properties. - This means that each option in this property may be selected at most once. This is not - implemented yet if the Property is not in ``do_not_create`` as well. - rjsf : bool, optional - If True, uiSchema definitions for react-jsonschema-forms will be output as the second return - value. Default is False. - wrap_files_in_objects : bool, optional - Whether (lists of) files should be wrapped into an array of objects that - have a file property. The sole purpose of this wrapping is to provide a - workaround for a `react-jsonschema-form bug - <https://github.com/rjsf-team/react-jsonschema-form/issues/3957>`_ so - only set this to True if you're using the exported schema with - react-json-form and you are experiencing the bug. Default is False. + The other parameters are identical to the ones use by ``JsonSchemaExporter`` Returns ------- @@ -691,7 +686,7 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T return exporter.recordtype_to_json_schema(rt, rjsf=rjsf) -def make_array(schema: dict, rjsf_uischema: dict = None) -> Union[dict, Tuple[dict, dict]]: +def make_array(schema: dict, rjsf_uischema: dict = None) -> Union[dict, tuple[dict, dict]]: """Create an array of the given schema. The result will look like this: @@ -742,9 +737,9 @@ ui_schema : dict, optional return result -def merge_schemas(schemas: Union[Dict[str, dict], Iterable[dict]], - rjsf_uischemas: Union[Dict[str, dict], Sequence[dict]] = None, - return_data_schema=False) -> (Union[dict, Tuple[dict, dict]]): +def merge_schemas(schemas: Union[dict[str, dict], Iterable[dict]], + rjsf_uischemas: Optional[Union[dict[str, dict], Sequence[dict]]] = None, + return_data_schema=False) -> (Union[dict, tuple[dict, dict]]): """Merge the given schemata into a single schema. 
The result will look like this: diff --git a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py index d85bb9a0..5677f629 100644 --- a/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/export_import_xlsx.py @@ -71,7 +71,8 @@ def _generate_jsonschema_from_recordtypes(recordtypes: Iterable, schema_generator = JsonSchemaExporter(additional_properties=False, name_property_for_new_records=False, use_id_for_identification=True, - plain_data_model=True) + do_not_retrieve="auto", + multiple_choice_guess=True) schemas = [schema_generator.recordtype_to_json_schema(recordtype) for recordtype in recordtypes] _, data_schema = merge_schemas(schemas, return_data_schema=True) diff --git a/unittests/table_json_conversion/data/multiple_choice_schema.json b/unittests/table_json_conversion/data/multiple_choice_schema.json index 71bf0379..01be127f 100644 --- a/unittests/table_json_conversion/data/multiple_choice_schema.json +++ b/unittests/table_json_conversion/data/multiple_choice_schema.json @@ -30,8 +30,8 @@ "items": { "enum": [ "Planning", - "Communication", - "Evaluation" + "Communication", + "Evaluation" ] }, "uniqueItems": true @@ -41,7 +41,7 @@ "items": { "enum": [ "Oral", - "Written" + "Written" ] }, "uniqueItems": true diff --git a/unittests/table_json_conversion/utils.py b/unittests/table_json_conversion/utils.py index 846809d8..95716eba 100644 --- a/unittests/table_json_conversion/utils.py +++ b/unittests/table_json_conversion/utils.py @@ -22,13 +22,14 @@ """ from datetime import datetime -from typing import Iterable, Union +from typing import Iterable, Optional, Union from openpyxl import Workbook def assert_equal_jsons(json1, json2, allow_none: bool = True, allow_empty: bool = True, - path: list = None) -> None: + ignore_datetime: bool = False, ignore_id_value: bool = False, + path: Optional[list] = None) -> None: """Compare 
two json objects for near equality. Raise an assertion exception if they are not equal.""" @@ -43,14 +44,19 @@ Raise an assertion exception if they are not equal.""" if key in json1 and key in json2: el1 = json1[key] el2 = json2[key] + if allow_none and (el1 is None and (el2 == [] or el2 == {}) + or el2 is None and (el1 == [] or el1 == {})): + # shortcut in case of equivalent empty content + continue assert isinstance(el1, type(el2)), f"Type mismatch, path: {this_path}" if isinstance(el1, (dict, list)): # Iterables: Recursion assert_equal_jsons(el1, el2, allow_none=allow_none, allow_empty=allow_empty, path=this_path) continue - assert equals_with_casting(el1, el2), ( - f"Values at path {this_path} are not equal:\n{el1},\n{el2}") + if not (ignore_id_value and key == "id"): + assert equals_with_casting(el1, el2, ignore_datetime=ignore_datetime), ( + f"Values at path {this_path} are not equal:\n{el1},\n{el2}") continue # Case 2: exists only in one collection existing = json1.get(key, json2.get(key)) @@ -72,11 +78,15 @@ Raise an assertion exception if they are not equal.""" f"Values at path {this_path} are not equal:\n{el1},\n{el2}") -def equals_with_casting(value1, value2) -> bool: +def equals_with_casting(value1, value2, ignore_datetime: bool = False) -> bool: """Compare two values, return True if equal, False otherwise. Try to cast to clever datatypes. """ try: - return datetime.fromisoformat(value1) == datetime.fromisoformat(value2) + dt1 = datetime.fromisoformat(value1) + dt2 = datetime.fromisoformat(value2) + if ignore_datetime: + return True + return dt1 == dt2 except (ValueError, TypeError): pass return value1 == value2 -- GitLab From 27089fb51d34b569af8585fd3b5aee7bfce74db2 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Mon, 28 Apr 2025 15:14:14 +0200 Subject: [PATCH 28/36] WIP: Automatic list enum references. 
--- integrationtests/test_ex_import_xlsx.py | 47 +++++++++++++++++-- src/caosadvancedtools/json_schema_exporter.py | 20 +++++--- unittests/table_json_conversion/utils.py | 35 ++++++++++++-- 3 files changed, 88 insertions(+), 14 deletions(-) diff --git a/integrationtests/test_ex_import_xlsx.py b/integrationtests/test_ex_import_xlsx.py index a2190302..5402b405 100755 --- a/integrationtests/test_ex_import_xlsx.py +++ b/integrationtests/test_ex_import_xlsx.py @@ -300,8 +300,49 @@ def test_export_lists(tmpdir): jsondata_filepath=tmpdir / "data.json", xlsx_template_filepath=tmpdir / "template.xlsx", ) - from IPython import embed - embed() + + # Check: schema + with open(tmpdir/"schema.json", encoding="utf-8") as schema_f: + schema_generated = json.load(schema_f) + + try: + assert len(schema_generated["properties"]) == 1 # Only 'Training' should be top level + training_props = schema_generated["properties"]["Training"]["properties"] + assert_equal_jsons(training_props["subjects"]["items"], + {"type": ["string", "null"]}) + assert training_props["subjects"]["uniqueItems"] is True + assert_equal_jsons(training_props["coach"]["items"]["properties"]["Organisation"], + {"enum": ["ECB", "IMF"]}) + assert_equal_jsons(training_props["supervisor"]["properties"]["Organisation"], + {"enum": ["ECB", "IMF"]}) + except KeyError: + print("It seems the generated JSON schema does not have the expected structure!") + raise + + # Check: XLSX template + template_known_good = load_workbook(rfp("data", "lists_template.xlsx")) + template_generated = load_workbook(tmpdir / "template.xlsx") + compare_workbooks(template_generated, template_known_good) + + # Check: Data json content + with open(rfp_unittest_data("simple_data.json"), encoding="utf-8") as myfile: + json_known_good = json.load(myfile) + json_known_good.pop("Person") + with open(tmpdir / "data.json", encoding="utf-8") as myfile: + json_generated = purge_from_json(json.load(myfile), remove_keys=["id"]) + 
assert_equal_jsons(json_generated, json_known_good, allow_name_dict=True, ignore_datetime=True) + + # Check: Filled XLSX + filled_generated = load_workbook(tmpdir / "result.xlsx") + # For the moment: just check a few samples + assert filled_generated.sheetnames == ['Training', + 'Training.coach', + ] + sheet_training = filled_generated["Training"] + assert sheet_training["K7"].value == "IMF" + sheet_coach = filled_generated["Training.coach"] + assert sheet_coach["G7"].value == "ECB" + assert sheet_coach["G8"].value == "ECB" def test_multiple_choice(tmpdir): @@ -434,5 +475,3 @@ def test_export_list_refs(tmpdir): assert sheet_top["B8"].value == sheet_training["B7"].value # Check foreign key component assert sheet_top["B8"].value == sheet_top["B11"].value assert sheet_top["G10"].value == "hermione@organisation.org.uk" - from IPython import embed - embed() diff --git a/src/caosadvancedtools/json_schema_exporter.py b/src/caosadvancedtools/json_schema_exporter.py index a994d3db..87d5e867 100644 --- a/src/caosadvancedtools/json_schema_exporter.py +++ b/src/caosadvancedtools/json_schema_exporter.py @@ -348,15 +348,21 @@ ui_schema : dict prop_name = prop.datatype if isinstance(prop.datatype, db.Entity): prop_name = prop.datatype.name - if (not multiple_choice_enforce - and (self._do_not_retrieve == "auto" - or (isinstance(self._do_not_retrieve, list) - and prop_name in self._do_not_retrieve))): - values = [] - else: + + # Find out if this property is an enum. 
+ is_enum = (multiple_choice_enforce + or + (self._do_not_retrieve == "auto" or self._multiple_choice_guess) + and self._guess_recordtype_is_enum(prop_name)) + if (isinstance(self._do_not_retrieve, list) and prop_name in self._do_not_retrieve): + is_enum = False + + if is_enum: values = self._retrieve_enum_values(f"RECORD '{prop_name}'") + else: + values = [] - if multiple_choice_enforce or prop.name in self._do_not_create: + if is_enum or prop.name in self._do_not_create: # Only a simple list of values json_prop["enum"] = values else: diff --git a/unittests/table_json_conversion/utils.py b/unittests/table_json_conversion/utils.py index 95716eba..aa6b6842 100644 --- a/unittests/table_json_conversion/utils.py +++ b/unittests/table_json_conversion/utils.py @@ -29,12 +29,25 @@ from openpyxl import Workbook def assert_equal_jsons(json1, json2, allow_none: bool = True, allow_empty: bool = True, ignore_datetime: bool = False, ignore_id_value: bool = False, + allow_name_dict: bool = False, path: Optional[list] = None) -> None: """Compare two json objects for near equality. -Raise an assertion exception if they are not equal.""" +Raise an assertion exception if they are not equal. + +Parameters +---------- + +allow_name_dict: bool, default=False + If True, a string and a dict ``{"name": "string's value"}`` are considered equal. 
+ """ if path is None: path = [] + + # if allow_name_dict: + # if ((isinstance(json1, str) and isinstance(json2, dict)) or + # (isinstance(json2, str) and isinstance(json1, dict))): + # breakpoint() assert isinstance(json1, dict) == isinstance(json2, dict), f"Type mismatch, path: {path}" if isinstance(json1, dict): keys = set(json1.keys()).union(json2.keys()) @@ -48,11 +61,25 @@ Raise an assertion exception if they are not equal.""" or el2 is None and (el1 == [] or el1 == {})): # shortcut in case of equivalent empty content continue + if allow_name_dict: # Special exception + my_str = None + if isinstance(el1, str) and isinstance(el2, dict): + my_str = el1 + my_dict = el2 + elif isinstance(el2, str) and isinstance(el1, dict): + my_str = el2 + my_dict = el1 + if my_str is not None: + if len(my_dict) == 1 and my_dict.get("name") == my_str: + continue assert isinstance(el1, type(el2)), f"Type mismatch, path: {this_path}" if isinstance(el1, (dict, list)): # Iterables: Recursion - assert_equal_jsons(el1, el2, allow_none=allow_none, allow_empty=allow_empty, - path=this_path) + assert_equal_jsons( + el1, el2, allow_none=allow_none, allow_empty=allow_empty, + ignore_datetime=ignore_datetime, ignore_id_value=ignore_id_value, + allow_name_dict=allow_name_dict, + path=this_path) continue if not (ignore_id_value and key == "id"): assert equals_with_casting(el1, el2, ignore_datetime=ignore_datetime), ( @@ -72,6 +99,8 @@ Raise an assertion exception if they are not equal.""" this_path = path + [idx] if isinstance(el1, dict): assert_equal_jsons(el1, el2, allow_none=allow_none, allow_empty=allow_empty, + ignore_datetime=ignore_datetime, ignore_id_value=ignore_id_value, + allow_name_dict=allow_name_dict, path=this_path) else: assert equals_with_casting(el1, el2), ( -- GitLab From a86cc51bc13f386640e4ef7fee2fe5f36e1c8e3e Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Mon, 28 Apr 2025 15:48:28 +0200 Subject: [PATCH 29/36] FIX: No over-eager enum detection. 
--- src/caosadvancedtools/json_schema_exporter.py | 7 +++---- .../table_json_conversion/test_table_template_generator.py | 4 +++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/caosadvancedtools/json_schema_exporter.py b/src/caosadvancedtools/json_schema_exporter.py index 87d5e867..d3d69692 100644 --- a/src/caosadvancedtools/json_schema_exporter.py +++ b/src/caosadvancedtools/json_schema_exporter.py @@ -354,10 +354,9 @@ ui_schema : dict or (self._do_not_retrieve == "auto" or self._multiple_choice_guess) and self._guess_recordtype_is_enum(prop_name)) - if (isinstance(self._do_not_retrieve, list) and prop_name in self._do_not_retrieve): - is_enum = False - - if is_enum: + if is_enum or not ( + isinstance(self._do_not_retrieve, list) + and prop_name in self._do_not_retrieve): values = self._retrieve_enum_values(f"RECORD '{prop_name}'") else: values = [] diff --git a/unittests/table_json_conversion/test_table_template_generator.py b/unittests/table_json_conversion/test_table_template_generator.py index 9407d844..b4291818 100644 --- a/unittests/table_json_conversion/test_table_template_generator.py +++ b/unittests/table_json_conversion/test_table_template_generator.py @@ -64,6 +64,7 @@ out: tuple filepath=outpath) assert os.path.exists(outpath) generated = load_workbook(outpath) + good = load_workbook(known_good) compare_workbooks(generated, good) return generated, good @@ -280,7 +281,8 @@ def test_schema_with_null_arrays(tmp_path): # Generate json automatically prepare_datamodel(modelfile=rfp("data/simple_model.yml"), recordtypes=["Training", "Person"], - outfile=tmp_path / "simple_schema.json") + outfile=tmp_path / "simple_schema.json", + do_not_create=["Organisation"]) # Compare result _compare_generated_to_known_good( -- GitLab From 161ec44775f5f682a05f141a1a5511af273d1eb8 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Mon, 28 Apr 2025 15:49:17 +0200 Subject: [PATCH 30/36] TEST: Added test data files. 
--- integrationtests/data/lists_template.xlsx | Bin 0 -> 7166 bytes .../data/multiple_choice_id_template.xlsx | Bin 0 -> 6013 bytes .../data/multiple_choice_model.yaml | 17 +++++++++++++++++ .../data/multiple_choice_retrieved_data.json | 17 +++++++++++++++++ 4 files changed, 34 insertions(+) create mode 100644 integrationtests/data/lists_template.xlsx create mode 100644 unittests/table_json_conversion/data/multiple_choice_id_template.xlsx create mode 100644 unittests/table_json_conversion/data/multiple_choice_model.yaml create mode 100644 unittests/table_json_conversion/data/multiple_choice_retrieved_data.json diff --git a/integrationtests/data/lists_template.xlsx b/integrationtests/data/lists_template.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..baac7cfe8a9f5b1cf00008cbbe0ef49f01d04436 GIT binary patch literal 7166 zcmWIWW@Zs#;Nak3a88>O#ef9385kJii&Arn_4PpH+DQlf4jb^ay|3=l`fhsHfn(va z-98?>6;0$0sN_j+?Oc4M`O`NSVIiL0O^5pm>+1g{ExqTHr*qFh%{#6$G_sRvuUc~L z@*}7J6;|q-on2ISOwhtTDBoGDsNdh~%bACcr<PQgOz`4*@!8k=BIEPqHz^FZ4(&y; zq2)&ZQaYZsM84?}DohVsbfcbSPm6DEwrQTHyiApy@uP61$%<tsynGhdEVtMr#GBg2 zRe3M9cIWB)@2Bud@)&sd3Qkvi_U?q(<?Vfnf2W*&Ib(+KuAR?{3X;CCo!ukUVvt@^ z?W<ht;qz|3sH}0kS9jFBKc}Aw2Y9n{w9Yb_^^Spo;T#hK1MXnrXJBBc$k8v)FUn5J z&(GGY$j#{uy4`onK%n-0xJzAo%nlKOL|5jE>#oce+x7d6h47<CHzwxhMSs0-U3g;J zs@1Fb1*+3(p3RwQyx%(fkad$;%*h!oe;X<kW}e!4=jfB$W^a8v(sia<ih4Qu=p~%E z`{?eUC131<v)(_f&P)o5iauX+e}j-#)~{7;oim+Yc17)ZH8($t*Th`4VQq^%>%klA zHKs3do$>Pj!2s8Tl50e)Llknk7dZxWzwtep?JV}WiMt|;wQ|ck`^#LJ>LCV?^y4z? zug1CE{1>o1LUwW!7qiBekmdR}%M-*z0~I@_2Xj9(ktkEGchKmbdV{Um_d)03lYh>c zhqqU(H>nT26tpohic?T{HUITIk1Ivr!qP7DWO*B3W<7sC=GiL|;h2k0pVbr{PoH;N zNZdlsr!Dr4+>doz3}?1n?x>iS)_-<Ek`dR3$b(xquIqGOxZ(-NjrBVhD*Shwcsug@ z*Co;~!XCf7pu!v3WB267ZOfxcpHA?eXDPa7+{gKMf;;cC==Z;U0>kbtaQW{bX^|!? 
zD<gf_wuoEmwQ%VK4b_}2nHqL0WfZ5KcsZ*+_S4sWOo^IZZ`mCM%ytxPlkY9Pzgo5R z(C7GVA~u&VNL{R)RKb2W`P1XS7E<by3$Jufd$>Tn?RDV$!wD9zmcBMQ%B!+EW!68J zdaEmv|0Ye_zftP>2aco9n&d2xr_`n?OzWH6pZa$H;%DXeqW&;}QWF17*BiV{3=BJ% z@uehQNJ=U$smw_&)~m?Pc^etnzt~Km?)`d)J@*}*`nW|OZdvMeb^5C-w<l|b))pKP zc-_<6Q#}37`t|I>PD1DJl}4|O)O68){PWM*8o7SXkIUYF<Na<IeAQ4h?BAQx7u6vm zn^G3V{gju#zJA)i>zPVkhn6nT{_kM@KlYpRt(VNp<+n}AI$t4h_tgEYyZ<CuVmB`3 zeHnLp8dq~jkg3b;b*q}!g-)3wJ3&hB%wYpQsY5OEmnL0bBYn5B^$urlN{`xkv;EVK zN$A&{-*q}QbIBhk*3g#Uj?>thn|0PMvG^JJ<JyF!cf;QsKRkAlr%im4_tTYCn?GHD zy<z_W9S`TX8|D;j3*_k$&6Kc@_`F9wzw_0ZEo=AN?dmNQe0k_;b5hvrD~T1xU-mWr zF+6wtkLy0uluYj}%e>b39+!S6&%ZX3C2`fET`!N<CYP)|k!V)``EQF~QCL%#)%<mm zG5q`w(u#SXRKBl&RNQmTJZ+lvO0VG0m#a3btxs7pt?5Jm+f(Z~-mkEqRW~jE$i*2; z=Y-vA30!l}X>}@h){<+R49(s7I5y5ZE-k!o!JPUA-Y@Q^Su@&?z1+7;ui|3)nSCs8 z53l`dQsQy@)8VKl;k9KY27j*bTN>LIgmt&Y@zktevaI#p_xk*EK}9kza+fB>_hxHM zuey={VaJ6v43p=sf1kPQ`hIWUHR6k3b}PMIY}n_Svgi%7^qSjRYIe4#Or;YiFDsk! zurRV_(bK>u)26mrNlLz1-R;x5+~(}O*~=ZDr!HFdEOS|3>W88uXWl0X7@ofuU2X`2 z=LI)+<h`6-WVl<?w0m}t@qSg)lmB(pX6)N{d-~6p)qjfPAAi5P_0!6E^_qW|?!BCx zIlJEd*1z4X^^B3+ZA^3itdZ$9{H7kp-^U#GFTbGWkb39sHxk!<Zp0P%o=Cks;qi}~ zvyGivg!b4U-tQJv<JL8I$=sk2k@kt7zx8ac=qeV=t~^%#`qAFys|>OYf+iVv+0L_2 zEw<1%($%>;Z`#ibp<mU?=c~jjeJfIyQ0IAgzL%lx=s}SLnGL%qs#?uAh*`vuke4r! 
z-dS|uo!ch?y9gO`b;Ie^%RGf#3wu(p$Z5oCp4hnOXrV;XJ%ii>2Oc>rsh^fzv-Kla z<u1MrrnRe_Hq3l9wd1VFc~gaVd*+_wSRXFkoorYxv!+bvnbU%0cjjm`g&y6KXf5GB ziN$$=*j-bp<9Sy-dE~$UO4zN^HR(t%!|}BWT?Q;=%6$o34$N&Xf4_$meEh@LcmC^x zFZX-i<b!hesk2O57Be$22ub40-4c-84XP)KGg4DaiuJ*iAt<Boo#9=)*g~M;{cmlJ zcfve%PXf0sPkUog_DCgo_8Gsc_fiDhL><<s-1e;dyXA;-MAG6bGDV&XzFvG^cClx_ zUHZz+$9sfl`N#ZU^4!XTKhx#Y+arH}?)Xvk%qnJqjg#itk4I_`-uY|wH@jxRn@)8J zZg!C;HILMsM8xMEKg^QeG5-S7^8%sZX^M>7b6$UJN)%byzUc~g)!vEI6({Xp+Q$7& zP}Zc+v*WqRom30)r9y4NDhF!kO=!A)Yu;l1m)#Op4lB0W>xw^r>bdy5yu>dfA)`~7 zI(|;GLf6d<oLBwaz0hv+gYzaoojFSVQ<gSGMT*s_oZ{KIJoY1NiTH$-yq=vEfsDH^ zZ@l!s+0xLHHEp{|lhSdGrxNG5wjAY;_@i@WXT=hGhXDEN%NxUOZDie9UMboZOZ2I~ zGynf6Plc29kMoDd?dxRcT{z72=l0T$>#wGK=$e?}Tf`G|ux#pw{HhrjOE`BnnB~g< z=~m3#+AuMfNp4-Cj?`49T)BcS=f=g~ioaV=Y?|0UJ;(X7n`2Az7Vp*)#xsFor@ue= zc;sxVRd9eOcV<%C<&d_QRtr<Y7u1^+%nkS2+%B+mg3A8u*>~@+JYKre=egA03v<1^ zOSROqonKBC@!V@0sUq6yli&D6BrI+RAJa>@>GxuOJ<oOgx#sZyV9|-~zDJk5`c@h{ zr}dSjM%h&E6NmMMI#V9ao7>}e-aUQ!VKx7NsjAZ~*|$6X;@Z06{pvTDjm~E8Ij(u( z)a-3MTc%&EbU(H{*+VnDKD8(~q#$yiSLTk#O=(lZQyYbZTrJkWza=*1B#&)#_lNmU zUfGN5Z?ZGGyi{!edZXYd-E?_Q6ItV`f*DmyOrorAR0O%#<?M60f3js$&Jos!UeTKt z?ofJ=$am@Q7V~f4F7596ktg)e&~UR*v36mms+gLfjBt3Ethe^YO+weh9v$;qv-Zf^ zsXMCsKP;F!tB%WV^L(bmtM^^=S2Miu>!jD2wMUjtjfm*f*50^9NL#xwS2b_Vky~DO zqB?hf?^s;-sy@yB>)O95Le6XF`y?&-I@xnh*54GN^eFq8M|NKGS9_Rkzs=iv(<bHX zWlxUvt+5vP8@`J9V%?PYRuk&V<9Y2iuV>q~Z<i70+V#`a7Jj{YN~NaukF~(rbH40G zulyc<Xx6l;+Wzyg44?1*EgOI4Dg7##)nYeCZ6W`1&liU?g?<&3u`Je%EK`^@ZE7dW zuYxj#U)(P2i+Zo_IxJpmVJ%Q=VLjo$<hhH>%dVZc6=^KYxrK9Hxx%k(C-z1DKX<pC zlBrxf{YTTpm^<RHem6`$z{hF*_x8D+wYSYnIp6F(+vF%&xpb$)OP=Q&RJLE6Xq(p5 z@~kmj@BH?TTuuqUt6{gSC;k@J?hd$jKxw&I53gu|Lg~yEuHj71_w7Ibjk_Sv>X={f z%F1x*qTeU7r+tlOv`%Wg$z0j0%Wmd$@0R;jp?F>Yy3*S@h3k|<?--}rE^7Z{c<`sK z3&+|+94ssLZSuL(WOPuV@on+sMIT;9%l-B0$k@kiA?UnT)}|%2#Lf8U&bHI0R`y;m z_9%Q{Q;uD2{8NoH(=jErMlyEh)K;$s;fWS)InR?ECF+f@cD!HwqUis%f0qq=7k&E8 zXyX1_O3%A@>5iRw(#^M@MPL774k`w9Elw#PWMN>?7bc?^Gy)fc)1!-LTZkOL|2^1X 
z&-IubVe91Gmw#QG>-qYeRc`t>U$#UO!H%gZ*5{VrSLQTeGf(+7VY-CuU(4zD8w*T+ z{+JW=x?`t;&g@%p=eO$4Y1L7>S(o*{BBoa2=ClbK-2$nS^KPZ<yy@S(Urqhke3zz; zTMjs?Eh|$>t$S~w<Wtb)(*M+=dG3slBAyYS_9_?k1un_=TKRZ)-h(9_Dce34oWK95 z`KTf1$EMbHk@QOz4`(QGzU*#!vgpe<nY}0Dk6h&R(*3-0=?(+ysrLC7K24uA!T4a( zP4393CD+y!z3Ey1s^;0h`IVpA0$xpA;rDatW|JE4shW@c+y5~CIM;ZkFF@)1u7ql{ zi~sf)YpaDM{hR7~;fX|GW1_x$^`4`j+}C}7s{EU;t?2el-#L$--BeM!z&~~7(L$T8 z_LbjXJ7{ispngz2aQ8NsLw?#P-aAaxxSRQS*5ntBDH3ZY$#Xrne`AvVg?mH8@h$s4 z>a%4mZJb=oBx&~`B1}*9R<7aEg-0%i&N%BB)UnBSxwV@%zhK|RMz5*NN0Rq!kym`W za<1k)k(y00M`t8^h0fh3s`d81fP%!0Yuw2%&0It@D%V9v*I(XlzP9T*v(@J2XF+dP zg=ZhXG1*9S|GHO`^qfDQomj=&t&_vu__ag-X3Vu`VcvhjOP*XZc)Fu5_FLLgdC3>0 zj!YVVlqUKt`?f;-PH*$YyBij<`2K7XE%K0Bqko~RD7GPu?Gv+I5?9t-pSx`Ld~?r* zez?if!1XtL;ZhCpt?ydd6(gfkQg2P3pm8X2=PLILiosuaG!N;Y{IY&R`c>JZH!D8= zn$^>p6<4crG3U_DlaGFh313l(NDT_BlfCB=ueeZo`>Up+P@Ss|3q=byGrTT*<~L{Y zsdWK0dD?#-L>c9K@5w&8D0t(WkTq7Kudj>nU05sd`+et){M4)Y3VGUWyocs?xh_w- zxiX?u>)LwFbsXRCPcGO$xggpwM<7r8&u+Fyc_%kTZ+bOtP5jiDM7cfc&m6^{RYqSt zuqQ)X>c7vl+}7(ge)%3!h1bkg4(7HTHdos7OYV>1;Y928Zr|;tJHy}qTXAsb+odOW zuFWXgH_>zQXI>CKnQy`V{*>j5XLqLkc+7TGdQ#nDtL5Jq&s&~+qITV#S-bJ0;IW^l zckJBXGs~B|XV2AwgBF#EGhIsfp7Wl`Ud$mgONH~cbl=R``<3>o3(R=cc;)TNo1M}% zX^hHq>eiL{Fh$kQd?6Xm**4Golg;(_?EBvHz01)~^SO7gXqDyP35QE&B=1wzH+b07 z7?vw^^>ekMMdi^=vmWGl@4QhUYn!zEjp4zYY+5a9#Z0WPn6F(qCple4aE5icNUQ(+ zO)tWoQc6`eJg^9r-tRoa>A6m*y6Hl;7jL*;D5-x)3|cu=MSt3ar!23p^Q|%t36p9N zooK;!Yi5j~p58*O&uU%Mj-PI}p1ncC$eq2yBI|lVrj1l_X)Hh6=~pwXf6If4qFwW4 z&$zNMFlh1PD~hBbEi%X$2dG^J>%W7?JN8aGpLN)Pr~Q2`*W%xgRHT!8EcQ-uTp#_N z;aK3K3rEZ*eOsPy-`}g-8dRoXEqU_W_umd#*WQ0#5;w7;=b%IAlm!MmZuBM^eArXH zV^?$O?ku^&O_SztkQZM4M|}gM{-k$X)1DV@i#gJwc>l}9H0_;B9MxZcD|3jQ{&ej! 
zRpwjD%WjpiW!w&3P}bg3pLl$qylmd0>Y0WoIyuf3&6r?(@lr$5gVlOQ>|b8~_%-Qc z{={!@1fO1iuzc!MyZ%>m&V02$edT#do}QINKId-rg?my@|J1pC-)Oa){M)$SjGz!I zc)H=`Ed~aLO?byPL?9tloRL_Rni5=6l$n=a3@WInPW8<{1R4waEgHzTd+l306W=u= ziY8`DvU~#gtQSs@nzhn%mxb%&`}zk2=BRz2w&i^G@%%X|vVUI8%00Mn)3WpVi(8Bh zE=@a?^Q$JXCx4!Wv%ZgJR`!DP<qb6xVwZNzV_kXBZvCG8s=}U4Ou7b}cxQY_&z&n~ z=W|!>*s(^9jZ7bQ^}V0;#PFJv<hdoez5JPz+0R8S-WM$)yZ?^5jdx(`OGR&fX+`Hx z0&E9!8`BqO?$fcekGpguJ#!EH|HkKw6Yb_qyijAL@A2UHh6?4zR_>{6=AIk0JZA_> zc_e7s@aDw&KH3ml+xW$!Wwt>;hHUP}wZe8=6gl%+K6|ZllX#RQli_WWth;4Zx%Z~i z(py=bk1x+t?s2MGJNf$emp#{BrJq(#(vQ_RAsUmhtEyD-ZN}}^O`DrtS##!h+%)~1 zXv3TtdvZ^zc7joipIk=XTjsa?S2ZX6x4tVFxp&4>l`@%aHaRw(Q^QWIx}59sj*p>i z$M&0BGj#0ce>r~B(naQ8^BZwc913p~N^W9gVDM$bm&yeh7#LFWlLLzK3ySrV^NUhJ z5jZ#KwBKO^fjyr^C%*5Tyv0>ObtTJEbuGm}mPcuCa<e>SPM6Jo_I~5#V|%I|ZL>C> zS8F+6ewNjZ=1Y4-S-loXuqtGn-m!w~%a*vdaE%LX9B!||92%=GWW>ci2}#=$`e8v7 z=c$4Vjiy3Yi#It$C#SS+To#?t(YMAaqUiFfHi_A*xV)Y0Jy<PzAA8?vQ0$RtnwzMx zSJ^7k=#}nMzpKrRizTu$93*-tYaEP^VqSGIU7qKRdFo+b{fBExRQCQ_uv5G&R=jkw zW7co~52s)KE0i#Oa%gQAlh)^8Wyw$no(%`2QjQlXf412fyJq?dpT{L)$1WS}jZ*hC z+kGzW@^zX2RquKF3s-w>54b6xwfxfpVS@`Hg_&=3PdENyys<}Z(XE?Dr}G>-dRO7& z1nW%SEs-}L=UZ%;eXnZH!DHuU34eQk(XDymrY76+`VBqpi|k5cU*ElL9Z)QDQr*n# z&Xt0e;~&oN*RcoZvb9}*oRb+D7#`!zWdg`Clvq#zj-Uzf92R<HtLMsHO#xC{GH-Ea zT9;&O*Yb`%F-2chQ|o^nqsfCF|8Lo^H~zi!bxCL$gZvUx-dT^CQWIL<F1=~`Vs-8L z=^ES<XV25lUf}4?k#Tn3i{;z-YNiNmaxqxw_*>#`*93)c3n~w+%neH45che8*M8+P zi6sr@+HSmRlQwB&F$J?+cAmdmTOhV$wR4Tgt?0sC`Z>23d)v50U-0|L(K9Lf)~yqv z+NK+nPF`=?XR&<A14GM0W}l89K3>Th`G8Bm!1A_-vESYAo8G<B+AaO!68C-urIlSb zUu%TUpFZWzVZSGDk7k+GA9L;cd|<!Z$+Mx~*g*-~_2TVGKN%PpW-;PR*uu!cmRwp~ zlAj9>xpN!24jG8FKD@h}<L2!U_KBCabX!=5e4lV{8)I?)3E%G8YUQSPeeLpgewS_k zI<FD9DVViWUt5zwmzPgnH~;Ta3(tcmA0)`NEO^SMv%Bnij52G-Nycs+g>1{^B3Vms zoO{4K-7;BE*)>0CUSW_)-?>9G-#lWRY!z}f@Mf#<Po)~mpC4bD{`KZdx&Qvhy?n*$ z+{Jb2x4{|q;WdVa=?n}E9~lVwE800fuOv0EBtE3FAhkFa<gcTl*8YbL1n<R$8x7 zcAHUg$`<B+L9M{FlsDc<QuZmkm;U{JgEK_5#o;Q4n)bfmao1i>I=g+7pIO4(3tH~N 
zHg236%XS8&_Q_4({*rapdAFNI=?7F7hVPUKy*Jz2NoKV}Q2SO!QLWZ(lYV{3^iI-< zz9@8}#KunDN1OlDa=W4hk!8HBvDfW(-fVjvW_}>B=;}Aul`?_JSqE3}rwXlJBAmXk z%=SR<qxpB1zCTysU}n2c=-Axa^$8(AJsnQW(79Kx9LH1QJj?08o%ZM3nw&Q+xOP8K zQtCvokJE<(mAj^FGp)Q@_I1ryuJ88M?X&c_4mM<Oo5t8Qcca73)eL*q6c#6Ww*GRw z|1*|1FXL<57x9zQ4a#Q?C2t&=_uoVQa{Z@^eRm(){JF@nt1IdO|GhR_;~y$AMc;pN z{Aj&;td%$QVTG{#PGu|8MvgV&f0cKH^ITD1-nOII;zRP*o3|=;{io$;?(6C|y7Wgk z4LqbH{G3}mlaYZT23)i<GKnxC=17pI1weBo3@8B7ngF*T0=!W*B98)tW(^Sf)EP16 z5YROv4>ExUiV>O%nZcS7!^Y@ZkvnFfQ7D8~e|E4|P}>JQ7==70g>DLRn+nyGFdnR? zfV*_)rXaUFK<zJtTOJC4O@V|T$VDivF?4f~>poB`1z`@e5ZD}yHVe85$YnFAjzpMn zTo_A@iLM>Fi~<$l2<@4oNZO&rIl6A-k^oeWA#^VgL(+{X)zA$<PN1j;RERS$U@1NV Vyjj^mws13WGaO@LU{I0-@c;w6KMw!^ literal 0 HcmV?d00001 diff --git a/unittests/table_json_conversion/data/multiple_choice_id_template.xlsx b/unittests/table_json_conversion/data/multiple_choice_id_template.xlsx new file mode 100644 index 0000000000000000000000000000000000000000..6ee3cbda5f5007e8562b71de1f89f5cdd0ef7f78 GIT binary patch literal 6013 zcmWIWW@Zs#;Nak3XsVtW#ef9385kJii&Arn_4PpH+DQlf4jb^ay|3=l`fhsHfn(va z-98?>6;0$0sN_j+?Oc4M`O`NSVIiL0O^5pm>+1g{ExqTHr*qFh%{#6$G_sRvuUc~L z@*}7J6;|q-on2ISOwhtTDBoGDsNdh~%bACcr<PQgOz`4*@!8k=BIEPqHz^FZ4(&y; zq2)&ZQaYZsM84?}DohVsbfcbSPm6DEwrQTHyiApy@uP61$%<tsynGhdEVtMr#GBg2 zRe3M9cIWB)@2Bud@)&sd3Qkvi_U?q(<?Vfnf2W*&Ib(+KuAR?{3X;CCo!ukUVvt@^ z?W<ht;qz|3sH}0kS9jFBKc}Aw2Y9n{w9Yb_^^Spo;T#hK1MXnrXJBBc$k8v)FUn5J z&(GGY$j#{uI+=IdKw$58ZI*kvx^Y?3)0o5iCNSI;*nM*5Bsa0_E|C+3Im=({FL#N| z?D}@0+2!0J^Zc}XpPt2dznAP0jXJ6$!*+&w>T9PJwP*Idy>zgrQf#Nzjw@VAWsIlu z!ro6)&*htWjlKV#w)AGMjgde0oz-5kQ}z{`h^tp_R$t1jz4_A2dp3wNT)WiYl<?|$ zjK(!ZoiqOvUThMv%qgi`<Cw^v<WQvYZJSVU>grTY6`L~N$~~EOpJ!#L$2>a|zxUF= ztUW!?+mj~;m7Vd@(+yZW)vosBKIgtGg3~8evbu9TKe)sHC$p1sYLH~nwmx@8_21U_ zX7PRWlm56!v}otq-f2c@8GYBYTtr;|tv|6%_DWy+w(@!NTeQAtrfpetdiL*=vGZLT zMRg0TX7yIzm+h!+HJsi0xuf#L=Jv7x!`Uof)`%Qir2QzsqVy9_$oj1-6#x6W=cWIB zscC*;?!vnhJ)$-}*{i=|TQFD9hscPDo{w_GnwXyL>ySy~cj0}$g|RU~!~UYXtIDBi 
zQyV``*5gR>&A9&Kb=A6WY$`>IEP0wXw6+@8ZFzBS^|U3+e#ci$QZl}vd7`tk(p@>} z+vC3`v(z?cnsi)ym?6-%ee%Kfixoy5`Io1-P0yKB`D=2^(*W@=s<O4+ZEPPHo_=#W zZCkDsZS7^3T3Ubn%zM*~zgR)Z=H&`s_g{<*3<b>ik_|5;*%X&l=A;&b63yGl!1c4u zMC#tJci6Lj!UXTbE<0XD%=~5)x_kSjC7Jwu5=mEYoUq8f6dx}w%+z^4Hh<lgrO_Qq zA1wc!`}yamM2GPG_gSxM4lPohv-Zx-?E9q#rz;&Jwiemjq}l7lE}z;V+ELUN{!VWD zyV_4$c~9p(YUe$6DNrzCcaeFJnSE8<?EHe>Ti5p-^r`gH%Tfsq4vD+X(bE;yob~E# zt^JoQml?B<<V2@$&6ztVe`bgY+vH@w{bHvi^kR;;cvmi5@ux{rtYcT4P<3sCik_wQ zvB(|ACLDPy{#<yGR>%6?#v#d<>$E#M0~XD?u-EY1R>k`Y7gt@BjF^6;WtYN&deaZ@ zmZU08@k<SAQ^}h!C1Qh*>70MR3h!&IojI#Uc-iak=6kgpzis|`cRO>GpXb8J5Y6Q= z7K@_R-_fZyKfTH1QsTAXcB^-n%47coo)J&8X0tuNjL#stJZRRsof=o1q{KHGTCzHd zrgzk3gynT+bl*Mr#rXdjzGEuOc1+RgRk-RlUp3!ET2FQT)uW7yJ7=6Y-0C$y*-QS* zsy~ZmEY8%5mW0n+uXN4yO49W^zrBT5<}X;b@ORH^g`k|m{2wb{8yW6hx+KZe?&XiT zg3jWbcSEd-g%&%zrypkhxyb4Dqcy)=*SvR}^yF5`vDa2Y@1MMnQ4)47{G}7{c|}ii zM~i4|($j47rVai5Hp<P>C$I3B$ejAzzr$j}oe55k?zVelD;G_vzmi$D($DsM@WNxy zHU_mD{ir)}CO?J2NPPWT>6tL-eL|-xyDCU8E&OO&VQ9{?>qpdnO+Q)j_<H$gBm1iU z{ky-I)`ibAmj7I-`1SqUx&P!Rz1{!S`GxAD+TU6~>od&uXMW2$+jRTUoQ`K-`P(l} zJO5z;$K8onFG@FSe>dNo?&ZE~?e-S0Lq4ai*_Z5DcX8qWH+f7P4h^4||G&<_rM}<- z*DbEb$bIq#`D!zc2G8(Z_mt)Pv9{`d*LBYl8eD`sZmzKDTKS>sxBuL)SKby({N@|D zR$aQet~}$*%I7;$E8aC4>|4LTtpA&1r2?CP+wHgWR4wH0$2ZBn-R$0M;4QHDZJGb8 z*ZfnC_jO)Ll+P4lb(CQ0`nLN(I!}<tc7rdq8d67ml`mbH=bm0Q@7jl)!=j%`WF3ls zg|s>-w`vPYX|DPD(Vp?Phw^Imt_3#7($g8P_I<o*vuoL#844FRa}_p+8NBIKI<YwW zV%*|HjupbM`fKYJgbJ|)rhL!nnd>OvAh%a4k^hK8af6NH_c@>Z-rc|VaNj%G|NDIQ zF@bXYoUp4Gw=**^Fw5f0@e+_652`4NGg4DaiuJ*iAt>|jjW`|j*h1v)`S2ZkUR*8t z;~^tAcS*OO#~~gQkqehYHwWifh9ySld3rXipRfH>(7i0NEi1wC(dx&=^2ftJUVZU8 zSx7qijK#v2H%h9uWj?sFoYC`o^5^4!|9sw<S<rZi;i$>c<j<VNpI<-ezu32lT`p0F zN5SLgjh>bhe!AVi1^ooxU*uW7K`3aN0^{_Y+>bqpA}eN3x>1x{ouS&XB<)<mskp<A zhbR1~aQo}q6g+XVfs8Wa!_OTlds3=-;*N?NY-tWTU6G}D?$gw+^O7b%gdQ26%G9W` z^X<AYD{@|hI(wy^(f3@RKK7M)ogt@mPYV4vja2Kf&DnJRz+A_rVJd2mJXLf2+`9g^ zNh)@-riGg@Omr40=E;_fdTgyzmy#8G=d!fHl+)KzQbf)D-542dQ&QLtKfGa6?6<y6 
zXd%OYfjy76zGv%yk;nHjH|dDCRcefZP|PHe=FknkiF=mr*6f`gz+K_<n)`o}MpR}( zimYSHbPEyDr4FmDWs>6`EU3Hx)$PcGC+{>PFYZv{DCjWTI6<+&`jppiF(tX#D|OO& zrkG6DTV~OhI&0ZO?>DR;_}Q0z^vNuGzF4B`$&IqNzjp76-F~VlZjt(~z*Nn!)bzC) zwG-4%W_wQz5!<v!a#C!BOQICVn&>(1#-)9)+y2Bg{}1Xqan#GuGxmR6mvQ)co{rcn zOs_snW>IQ7F=yWO?=tx>szR4KOjn&2+Mc?decyb?{!2j`4xfYKAL;fnu2))h`+)Pj zg{1-qQdm--zS^%-&|{>hs^jsOSKLHX>9lK?hroj^dsbBk=m~$}a6F{$c~xE9-%EK# z+0^-OPci+?@7L{o7rU#~d;8g?cQ?<y|9(^ajyK_YcfXfeYQ*r}ij~{jd^+*rrR?I$ zOU1wU-+$`)@Y4$4)0dV`wcq|zpF=3aD5)gfp!KZm^Ztsb+4pVR`wRH$7S0Yy=zXJ* z`O-JfC*~?k-m`?ees3mjo%&I<Bj57SI>kB8N4A_?sF*gl<Fm}g^@?|#kL(fN_`*fU zT2NeHQD?%6ODF$E%)R5r`g@jNp7Zj?Wx<o3ozGYaKA+oBsh)JPGonmMFV0Ek#g+dN zbAQdX`YLTzIm^$tZ{f0F;ld>^ueiu$Ta_w_&Fx4PE?o5Ti_jy7<&8{yC0n~{zt<i) zRcoc+VXARtlUMGkXs^RQ=5zR^tUDIxuKX<4u}xktO5s=9HKvP)_GdI^^nRHx_y7K` zhe75sS9#cls@CUiZH~WTe)_p-edw03U(ZU-q9?oLZ&P#1-!{#`WKt!!OLF8Ehc9*6 zjW2kwx9ljZKF(_&u`Y3YjOvoC?ayr2{mZ+^bkXYR9@a&rr}jv1deNry^@n~|+n#S> zZ+rEVRYb4Pw3=j~BoXbo>t6rP*MB=!M<-laSLSx=gt0Gc*K&*2oqN9h+jZ1OVE@j9 zr=Nd`CS^1AC7W^_zWx2<#BiVEzrAP3Urp?PE*Dz=Az{sm2K9xxszH~(&r?_ua%J7x zS8Ea`AMBWwSmr)&V^#XIt%VVD6{iTN7Do0?Sf;4CZssT93$a!555GulYHgn==n*FS zspo8u(LsU6hvlNm{~zk`*15Ta92ass5b&1cK)Yv!mvCj&k*6Z_<wVM-3Epr|%}5RX zE;c`nWx90Iy#Fz&xhyI?3qP%#A$;unI<3#80(0`e^O{LsR5;PEuQoqmQtixD|L51` zuJ}>pFLb8&eMS16ODs$4?>+je=B5)LVsSiFK3zH6Us>{V!o6b}{Bk^B|GHfii&0)Z zPdz4VdClbi44_ih(4MJNft7(lQv_crD+MWKAsrV`S(^lHltVf(6MeJ6otVF(Zr_i3 zvM2Xg?Co(}|Mv&uvA{<cj+jllw)KCVo^&)<>YEFbFI@k;&*SSWlipVyN1S=sRxM&_ zJ$ST?V{hZ%cW)m`hMiq>??HxY{|0%X)q8|*G>CiZM{WLW5viASXo7X@l9OJ3E3cp2 z)%-S4|MVx%%X1jdiUzNZX5Nyg9k9Fj(Ekm|*LnHk7gx_WIKj(#wn%QG@x`G2qzBsi zM(kg5e?GN+YOf@FL;2MD=J`Qp<(yaJ)8yV)ap!a1UK>#N=8wwfPupj@u3dlcZVkBp z@&E9%>JkG3gDT#RiU=fRiZc?6Qd5FUiZb)k!41!;Qv&^)4MbY+{}uJU-?~%hn9hMs z6JAbRFyUlUO_bIhUZvcPdv$K_-+tTRSBhT#l)C44zI^${ws~>R4aT0eTe*JDHGjkt zvhZ}&TjzZz<@4n&Hfw}vO089RqQ0R<>@LeSrQ)De=Qzb}!S}bjvIa%?Z(nfdoS(Gj z1>Ljvwy$Ljyv4<k`ncbHadmuGRAJZ)zr+*j>pYgfTF>|Bb^pf}_r|r09|WajMF;M< 
zb?#`BB}4xslbg@p-&Fcw$o0qg2mc?b^OJwhYO%|oR5MkzJ9m#yxZzp5+0VU&W=NN( zP1ojJ=Pk6Xg+(ZWYf0m`&3RJZP7gJni23VY_^?K>tL9&X_sgh70xzT{FNu(GxqR{C zm5w%+eKVy5GnTDhKE1>@dv^A@dkX@p&VI<LdU&9t?)`P;<zL;6Rd(!QDtni@teofW zcPXx1ueMpW4BB_TI4rR3*nDy4_4=L-^(At-Au&ZoPK`b@;&`9RZ+y=d|AiNnhST>= z&dy+DU@&9Fmxcuy7#LFWlLLzK3ySrV^NUhJO@!K@(|(5y1onIuo%nv`+%1eLOINZi zedM63(&cccXRj}BXY$Q=uiqC=Qq$e?XxnCEx%U-w{mN1Wa}GpAt~BgoV_{uTp}OSM z@fW3r-1&hCvcXNFQ=1HLHM>rJqxE#wr&9+uuQZWRKgf2^Bl6T&iM4YsizJ^84=}R+ z!1eTGYO2>W8xzZGJ@uPq3VcqO>K!yZD06sg;)-hbk_{qzwqMB(HeqW@)>y#uRHu(k z&@M}M#-%u$3kK?^f_Cj~>%P?Y=Bl-J^~18=vN`R&SI)~n{QmQ@x3KAyRdrf?8s7ZP z0eKCU0*&Si-j>RKHrq2_FHNJ{(}v4a=Gb9ZnTsk5TWXg4IREq3hPB$0uf7rd5!LY2 zE$eytjY<2Z<ivfRX8(UMv!USE1&?20drvh*nt%6>_Yx@oopWkRi&Dj!o~YF}N1u1u z#aF)gD{sJgQln?GYl-vYXs=0=8|FG1w>S2#sZW}wa(1olI_Dc-pNkh<S9{aAer^ft zmbdF{e$Ttf4~jLLnwZ3LMh1pgc=MM4a;zm56o4Xa?gZcbL!d#E-=d*Mw)(!@btph; zOXe-kOzV=2?ONV3C#LADYBtsHXOJp5rha?<*Bf;wi`Iv3W0qfHsyj<e_0)j^?Ikz2 zyjWd*{<#MCM9G6wOfM{AX4$fY|H<WOb~`VQNTnGU7UlNjH5NXIzMv!GY33=p=DN<Z zOZgjQ*-kz@`zf%wbBESxw!?N!f(k|DjEjs5H+Fw_YJGbA_n}?ZJB}<~!u(dG+ktc6 zvbnuJr9sSV<>$X%5|j1m(VR(hj;3(Mo|Na8U#?pbcID`r6>8d<$7=uE=KorDE|2Zk zm!tY0I$jrjvhq^>)b$|x#>72#yM34YyUu)M^5MSnk1U<P%%G4vTv62Whk=1%E+fA5 zE{q&<$)&|5`MKc0JGYVRkby|+!@J8lZr%=IpLl6Yw}o}c_X+p5F&5{a@a?XxR&ILN z*Di18ciHx@^BRGhf>}HDwKW-ZdHK|J^Zzcj@H}|(L4sV%f~RacyUVV}D6@8)WbD>a z$hKTAlC|{4xd*({EtB<>UGszH6$Y8~ojWx1%_GLiRv}jdZ?+2mRI0K3`SF$MUvIvY z`|p3;%U7(<U0j!bTO8!Chu0VyrZX@wd}JWxuW0A|ypq(slK7Cyg4E(zkiYhZob^Af zAhQ4c`ivd34m~?4aXF9OzHir&N;8i2y6b9KZru4-z3^)4%&o<dk2?=k@7Ir0n4Oz4 zcg;bog`s_F6+J=$KE(@9$?VmUyOeD*yKmFcYYpB`>x(Q`?J+lRu?TBi!k5jg6~dLP z`ZY1zJ4qw@lF*5#6*aCi!sMp-*FJH${DzNf*Q(m0{hY_w<~1ySvh8hmkmaIdSDFIl zPpO1?s-JUuTh)+#l>g4s_;UpgX142uj>Z02mk?5^ppfDtc7C_ud$wJQzKRL+4wXh9 zRGb<7T7D%D?-Nrs#fk?uv0k%h)nx3hxm?xxd;MN#U-8z21E$d@84jL{Sa2tlA@16a z$c;;!WG|N2MK?=F?N$8i{E6*>$7UZkvyG4KA1#*s{!iv#=9hJg#C~kv{;-zcuCex= zojjxc_FFqX7)My#JyQOz!_QZN>9y<s7v)`zzusICxNpT?x5_Nqa{berpLEaqe++b+ 
z6<_s>6BJ30the*0FfuS)0T;H6Od<>-@PSU`u|3d0C(^hl1In;xfH$f}<k20_&>uph zIwQtFAi8Gcjv_BYCj$dR95YxmVmJt0D{|`x)RRQ$4qyjs1+{g+9ZKYWCAuldEd@|Z z31P}M9<V7m+D+)@AlGA{b_K$m3%p=+Fj^VtCLouop!yDBLKQ!j+7Deja=`*BcoEtS z1d+5ui(z!#$k`WE_#kv=2_fl56h-I;ASW$U1Du2z7_j8|0B=?{kS$ycTnr6N3=Ge~ Gp#}h?2`7F4 literal 0 HcmV?d00001 diff --git a/unittests/table_json_conversion/data/multiple_choice_model.yaml b/unittests/table_json_conversion/data/multiple_choice_model.yaml new file mode 100644 index 00000000..bbbe45b9 --- /dev/null +++ b/unittests/table_json_conversion/data/multiple_choice_model.yaml @@ -0,0 +1,17 @@ +Training: + recommended_properties: + date: + datatype: DATETIME + description: 'The date of the training.' + skills: + datatype: LIST<Skill> + description: Skills that are trained. + exam_types: + datatype: LIST<ExamType> + +# Enum RecordTypes +Skill: + description: Skills that are trained. + +ExamType: + description: The type of an exam. diff --git a/unittests/table_json_conversion/data/multiple_choice_retrieved_data.json b/unittests/table_json_conversion/data/multiple_choice_retrieved_data.json new file mode 100644 index 00000000..78969122 --- /dev/null +++ b/unittests/table_json_conversion/data/multiple_choice_retrieved_data.json @@ -0,0 +1,17 @@ +{ + "Training": [ + { + "name": "Super Skill Training", + "date": "2024-04-17 00:00:00-04:00", + "skills": [ + { + "name": "Planning" + }, + { + "name": "Evaluation" + } + ], + "exam_types": null + } + ] +} -- GitLab From 6663bbaafe1429a38328758a9c823c6f37bf2f9f Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Mon, 28 Apr 2025 16:07:55 +0200 Subject: [PATCH 31/36] STYLE: Linting. 
--- src/caosadvancedtools/json_schema_exporter.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/caosadvancedtools/json_schema_exporter.py b/src/caosadvancedtools/json_schema_exporter.py index d3d69692..38ef2d5c 100644 --- a/src/caosadvancedtools/json_schema_exporter.py +++ b/src/caosadvancedtools/json_schema_exporter.py @@ -60,6 +60,7 @@ from typing import Any, Iterable, Optional, Sequence, Union import linkahead as db from linkahead.cached import cache_clear, cached_query from linkahead.common.datatype import get_list_datatype, is_list_datatype +from linkahead.utils.get_entity import get_entity_by_name from .models.data_model import DataModel @@ -185,7 +186,7 @@ class JsonSchemaExporter: self._multiple_choice_guess = multiple_choice_guess self._wrap_files_in_objects = wrap_files_in_objects - @ staticmethod + @staticmethod def _make_required_list(rt: db.RecordType): """Return the list of names of properties with importance db.OBLIGATORY.""" required_list = [] @@ -271,7 +272,6 @@ ui_schema : dict # Is this a multiple choice array? multiple_choice = prop.name in self._multiple_choice - prop.name in self._do_not_create if not multiple_choice and self._multiple_choice_guess: multiple_choice = self._guess_recordtype_is_enum(list_element_prop.datatype) @@ -417,7 +417,7 @@ ui_schema : dict return self._customize(json_prop, ui_schema, prop) - @ staticmethod + @staticmethod def _make_text_property(description="", text_format=None, text_pattern=None) -> OrderedDict: """Create a text element. @@ -477,7 +477,7 @@ ui_schema : dict out : guess True, if the RecordType is guessed to be an enum. False otherwise. 
""" - rt = db.RecordType(rt_name).retrieve() + rt = get_entity_by_name(rt_name) return len(rt.get_properties()) == 0 def _retrieve_enum_values(self, role: str): @@ -686,6 +686,7 @@ def recordtype_to_json_schema(rt: db.RecordType, additional_properties: bool = T no_remote=no_remote, use_rt_pool=use_rt_pool, multiple_choice=multiple_choice, + multiple_choice_guess=multiple_choice_guess, wrap_files_in_objects=wrap_files_in_objects ) return exporter.recordtype_to_json_schema(rt, rjsf=rjsf) -- GitLab From de8de48332e31dba925562b30f462e316a3cc514 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Mon, 28 Apr 2025 16:29:37 +0200 Subject: [PATCH 32/36] FIX: More json schema export fixes. --- integrationtests/test_ex_import_xlsx.py | 4 +++- src/caosadvancedtools/json_schema_exporter.py | 19 ++++++++++++++----- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/integrationtests/test_ex_import_xlsx.py b/integrationtests/test_ex_import_xlsx.py index 5402b405..ef8261f2 100755 --- a/integrationtests/test_ex_import_xlsx.py +++ b/integrationtests/test_ex_import_xlsx.py @@ -310,7 +310,9 @@ def test_export_lists(tmpdir): training_props = schema_generated["properties"]["Training"]["properties"] assert_equal_jsons(training_props["subjects"]["items"], {"type": ["string", "null"]}) - assert training_props["subjects"]["uniqueItems"] is True + if "oneOf" in training_props["coach"]["items"]: + raise ValueError("'coach' should be handled as 'do_not_retrieve', no records should " + "have been chosen.") assert_equal_jsons(training_props["coach"]["items"]["properties"]["Organisation"], {"enum": ["ECB", "IMF"]}) assert_equal_jsons(training_props["supervisor"]["properties"]["Organisation"], diff --git a/src/caosadvancedtools/json_schema_exporter.py b/src/caosadvancedtools/json_schema_exporter.py index 38ef2d5c..2b4a977f 100644 --- a/src/caosadvancedtools/json_schema_exporter.py +++ b/src/caosadvancedtools/json_schema_exporter.py @@ -272,8 +272,11 @@ ui_schema : 
dict # Is this a multiple choice array? multiple_choice = prop.name in self._multiple_choice - if not multiple_choice and self._multiple_choice_guess: + # breakpoint() + if (not multiple_choice and self._multiple_choice_guess + and db.common.datatype.is_reference(list_element_prop.datatype)): multiple_choice = self._guess_recordtype_is_enum(list_element_prop.datatype) + # breakpoint() # Get inner content of list json_prop["items"], inner_ui_schema = self._make_segment_from_prop( @@ -352,11 +355,17 @@ ui_schema : dict # Find out if this property is an enum. is_enum = (multiple_choice_enforce or - (self._do_not_retrieve == "auto" or self._multiple_choice_guess) - and self._guess_recordtype_is_enum(prop_name)) + (self._multiple_choice_guess + and self._guess_recordtype_is_enum(prop_name))) + # If `is_enum` -> always get values + # Otherwise -> `do_not_retrieve` may prevent retrieval if is_enum or not ( - isinstance(self._do_not_retrieve, list) - and prop_name in self._do_not_retrieve): + ( + isinstance(self._do_not_retrieve, list) + and prop_name in self._do_not_retrieve) + or ( + self._do_not_retrieve == "auto" + )): values = self._retrieve_enum_values(f"RECORD '{prop_name}'") else: values = [] -- GitLab From 68470c42b3693bfaeb3719543bc0a7678f69ee82 Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Mon, 28 Apr 2025 17:06:37 +0200 Subject: [PATCH 33/36] TEST: Cleanup of test data. --- unittests/table_json_conversion/data/simple_model.yml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/unittests/table_json_conversion/data/simple_model.yml b/unittests/table_json_conversion/data/simple_model.yml index 5dfb2aa2..87249f5f 100644 --- a/unittests/table_json_conversion/data/simple_model.yml +++ b/unittests/table_json_conversion/data/simple_model.yml @@ -30,8 +30,3 @@ Training: ProgrammingCourse: inherit_from_suggested: - Training -## Unused as of now. 
-# Organisation: -# recommended_properties: -# Country: -# datatype: TEXT -- GitLab From bf3da50a42ad50b2b9fcc43a1d0bf1bd14212d6f Mon Sep 17 00:00:00 2001 From: Florian Spreckelsen <f.spreckelsen@indiscale.com> Date: Tue, 29 Apr 2025 15:54:38 +0200 Subject: [PATCH 34/36] BUILD: Increase minimum required version of pylib --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index aa8ced3d..89e4b69e 100755 --- a/setup.py +++ b/setup.py @@ -155,7 +155,7 @@ def setup_package(): author='Henrik tom Wörden', author_email='h.tomwoerden@indiscale.com', python_requires='>=3.9', - install_requires=["linkahead>=0.13.1", + install_requires=["linkahead>=0.17.1", "jsonref", "jsonschema[format]>=4.4.0", "numpy>=1.24.0, < 2", -- GitLab From d49d2709d523a0549f7e831ff17414770232f299 Mon Sep 17 00:00:00 2001 From: Florian Spreckelsen <f.spreckelsen@indiscale.com> Date: Tue, 29 Apr 2025 16:01:33 +0200 Subject: [PATCH 35/36] BUILD: Use correct minimal Pylib version --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 89e4b69e..2f257286 100755 --- a/setup.py +++ b/setup.py @@ -155,7 +155,7 @@ def setup_package(): author='Henrik tom Wörden', author_email='h.tomwoerden@indiscale.com', python_requires='>=3.9', - install_requires=["linkahead>=0.17.1", + install_requires=["linkahead>0.17.0", "jsonref", "jsonschema[format]>=4.4.0", "numpy>=1.24.0, < 2", -- GitLab From 8a4f18a7f08612bce83f5c69f7157645afd1943c Mon Sep 17 00:00:00 2001 From: Daniel <d.hornung@indiscale.com> Date: Tue, 29 Apr 2025 18:49:45 +0200 Subject: [PATCH 36/36] REFACTOR: Documentation and small changes. 
--- CHANGELOG.md | 7 +++++-- src/caosadvancedtools/table_json_conversion/fill_xlsx.py | 2 +- .../table_json_conversion/table_generator.py | 2 +- unittests/table_json_conversion/utils.py | 5 ----- 4 files changed, 7 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index dc465d78..2d6b41c8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,9 +8,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added ### -- Added table_json_conversion.export_import_xlsx with a public function - export_container_to_xlsx, which exports the data of a given Entity +- Added `table_json_conversion.export_import_xlsx` with a public function + `export_container_to_xlsx`, which exports the data of a given Entity Container to a XLSX file. +- Added parameters to the JsonSchemaExporter to + - add `id` property to all RecordTypes + - guess when a reference property probably should be treated like an enum. ### Changed ### diff --git a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py index b47dd56f..005742f0 100644 --- a/src/caosadvancedtools/table_json_conversion/fill_xlsx.py +++ b/src/caosadvancedtools/table_json_conversion/fill_xlsx.py @@ -376,7 +376,7 @@ validation_schema: dict, optional template_filler = TemplateFiller(result_wb, graceful=(validation_schema is None)) template_filler.fill_data(data=data) - if isinstance(result, str): + if not isinstance(result, Path): result = Path(result) result.parent.mkdir(parents=True, exist_ok=True) result_wb.save(result) diff --git a/src/caosadvancedtools/table_json_conversion/table_generator.py b/src/caosadvancedtools/table_json_conversion/table_generator.py index 4a5fcd85..17ed5dac 100644 --- a/src/caosadvancedtools/table_json_conversion/table_generator.py +++ b/src/caosadvancedtools/table_json_conversion/table_generator.py @@ -331,7 +331,7 @@ class XLSXTemplateGenerator(TableTemplateGenerator): sheets = 
self._generate_sheets_from_schema(schema, foreign_keys, use_ids_as_foreign=use_ids_as_foreign) wb = self._create_workbook_from_sheets_def(sheets) - if isinstance(filepath, str): + if not isinstance(filepath, Path): filepath = Path(filepath) parentpath = filepath.parent parentpath.mkdir(parents=True, exist_ok=True) diff --git a/unittests/table_json_conversion/utils.py b/unittests/table_json_conversion/utils.py index aa6b6842..0134c062 100644 --- a/unittests/table_json_conversion/utils.py +++ b/unittests/table_json_conversion/utils.py @@ -43,11 +43,6 @@ allow_name_dict: bool, default=False """ if path is None: path = [] - - # if allow_name_dict: - # if ((isinstance(json1, str) and isinstance(json2, dict)) or - # (isinstance(json2, str) and isinstance(json1, dict))): - # breakpoint() assert isinstance(json1, dict) == isinstance(json2, dict), f"Type mismatch, path: {path}" if isinstance(json1, dict): keys = set(json1.keys()).union(json2.keys()) -- GitLab