diff --git a/.gitignore b/.gitignore index 55fb3f0d1bc6c101704557da8f35d6e784b5ea89..04a7ba7bdfc8fb1a5e4885e412e31ed218469730 100644 --- a/.gitignore +++ b/.gitignore @@ -13,6 +13,7 @@ __pycache__/ dist/ build/ src/caosdb/version.py +src/linkahead/version.py # documentation _apidoc diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 0430a4f6b5ac08d4ab38f00bff78b845e11fb97e..dfe61ff4e0c4a107e6f1e24667e271557eef2de3 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,5 +1,5 @@ # -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -44,7 +44,7 @@ code_style: - make style allow_failure: true -# pylint tests for pycaosdb +# pylint tests for pylinkahead pylint: tags: [ docker ] stage: linting @@ -61,7 +61,7 @@ unittest_py3.7: image: python:3.7 script: &python_test_script # Python docker has problems with tox and pip so use plain pytest here - - touch ~/.pycaosdb.ini + - touch ~/.pylinkahead.ini - pip install nose pytest pytest-cov python-dateutil jsonschema>=4.4.0 - pip install . - python -m pytest unittests @@ -82,7 +82,7 @@ unittest_py3.9: script: # verify that this actually is Python 3.9 - python3 -c "import sys; assert sys.version.startswith('3.9')" - - touch ~/.pycaosdb.ini + - touch ~/.pylinkahead.ini - make unittest diff --git a/CHANGELOG.md b/CHANGELOG.md index d933e85574438e9cd5e21c546ea531e04502d0fb..7354b52d2305b910076c0f03859b67aa9f135b65 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * `Message.__init__` signature changed and `type` defaults to "Info" now. * `Message.__eq__` changed. Equality is equality of `type`, `code`, and `description` now. +* Rename from CaosDB to LinkAhead. 
For proper migration, follow the instructions + in `migration_to_linkahead.md` and check the documentation at [docs.indiscale.com](https://docs.indiscale.com/caosdb-pylib/README_SETUP.html#migration). ### Deprecated ### diff --git a/Makefile b/Makefile index 0a0888ad0484c0307583e139e65058c38574ed3a..d15c830d8e4cf6e4bc0b519b9fa5b8cb5f224043 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,5 @@ # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> # Copyright (C) 2020 Daniel Hornung <d.hornung@indiscale.com> @@ -40,7 +40,7 @@ style: .PHONY: style lint: - pylint --unsafe-load-any-extension=y -d all -e E,F src/caosdb/common + pylint --unsafe-load-any-extension=y -d all -e E,F src/linkahead/common .PHONY: lint unittest: diff --git a/README.md b/README.md index 7215591a4f31f1946029442de291eb9ccf9beea1..193cb8f0cff8ff5cee36a40a78e53f070527e2e0 100644 --- a/README.md +++ b/README.md @@ -41,7 +41,7 @@ the preferred way is also a merge request as describe above (the documentation r However, you can also create an issue for it. * You can also contact us at **info (AT) caosdb.org** and join the CaosDB community on - [#caosdb:matrix.org](https://matrix.to/#/!unwwlTfOznjEnMMXxf:matrix.org). + [#linkahead:matrix.org](https://matrix.to/#/!unwwlTfOznjEnMMXxf:matrix.org). ## License diff --git a/README_SETUP.md b/README_SETUP.md index 01eea85188078ae6f2fe226e89e5c227497b4bd0..b05eff87711b84682aa82bbd0aafd61f2e8c86eb 100644 --- a/README_SETUP.md +++ b/README_SETUP.md @@ -128,3 +128,6 @@ Build documentation in `build/` with `make doc`. ### Troubleshooting ### If the client is to be executed directly from the `/src` folder, an initial `.\setup.py install --user` must be called. 
+ +## Migration ## +TODO diff --git a/RELEASE_GUIDELINES.md b/RELEASE_GUIDELINES.md index 95ee8e314871153476c30790a456242e38dcaf9e..b3025f0021ac82a29db5380be021ccaec86b96f9 100644 --- a/RELEASE_GUIDELINES.md +++ b/RELEASE_GUIDELINES.md @@ -1,7 +1,7 @@ # Release Guidelines for the CaosDB Python Client Library This document specifies release guidelines in addition to the general release -guidelines of the CaosDB Project +guidelines of the LinkAhead Project ([RELEASE_GUIDELINES.md](https://gitlab.com/caosdb/caosdb/blob/dev/RELEASE_GUIDELINES.md)) ## General Prerequisites diff --git a/create_slim_linkahead_wrapper.py b/create_slim_linkahead_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..bb2162ee7642576d5ecbef5f2556fc0ad74a0573 --- /dev/null +++ b/create_slim_linkahead_wrapper.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 +# encoding: utf-8 +# +# ** header v3.0 +# This file is a part of the CaosDB Project. +# +# Copyright (C) 2021 Indiscale GmbH <info@indiscale.com> +# Copyright (C) 2021 Henrik tom Wörden <h.tomwoerden@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# +import os +base_root = "src/linkahead/" + +initcontent = """ +from {module} import * +from warnings import warn + +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import {module}`. 
Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) +""" + +for root, dirs, files in os.walk(base_root, topdown=False): + if root.endswith("__pycache__"): + continue + cdir = os.path.join("src/caosdb", root[len(base_root):]) + os.makedirs(cdir, exist_ok=True) + for fi in files: + if not fi.endswith(".py"): + continue + path = os.path.join(cdir, fi) + with open(path, 'w') as cur: + if fi == "__init__.py": + cur.write(initcontent.format(module=".".join( + os.path.join(root, fi[:-3]).split('/')[1:-1]))) + else: + cur.write(initcontent.format(module=".".join( + os.path.join(root, fi[:-3]).split('/')[1:]))) diff --git a/examples/pycaosdb_example.py b/examples/pycaosdb_example.py index c7eab4a64e6797f26b49716499ee4e0267b167a1..9a3d766791ca7a6fd111d734d08ac4cf3b85b75a 100755 --- a/examples/pycaosdb_example.py +++ b/examples/pycaosdb_example.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 """A small example to get started with caosdb-pylib. -Make sure that a `pycaosdb.ini` is readable at one of the expected locations. +Make sure that a `pylinkahead.ini` is readable at one of the expected locations. 
""" import random diff --git a/examples/pycaosdb.ini b/examples/pylinkahead.ini similarity index 97% rename from examples/pycaosdb.ini rename to examples/pylinkahead.ini index 8cf74e43c5db32ed139c4fe371a6c2b3831b2ee1..f37e24e0e5b754ec58a07b034ba2755096f0b441 100644 --- a/examples/pycaosdb.ini +++ b/examples/pylinkahead.ini @@ -1,6 +1,6 @@ # To be found be the caosdb package, the INI file must be located either in -# - $CWD/pycaosdb.ini -# - $HOME/.pycaosdb.ini +# - $CWD/pylinkahead.ini +# - $HOME/.pylinkahead.ini # - the location given in the env variable PYCAOSDBINI [Connection] diff --git a/examples/server_side_script.py b/examples/server_side_script.py index 71bd9c05b4e86133cc356e1c15359701642a9486..0c5013eb7fac64d2ee04e8a7ab02f39342f411a8 100755 --- a/examples/server_side_script.py +++ b/examples/server_side_script.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen diff --git a/examples/set_permissions.py b/examples/set_permissions.py index 8162b11bfefb41b1bcdbc74b8e314f99a61d1a4e..a558bde73897cb6827c93373cc8327efc10e6e15 100755 --- a/examples/set_permissions.py +++ b/examples/set_permissions.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (c) 2019 IndiScale GmbH # Copyright (c) 2019 Daniel Hornung <d.hornung@indiscale.com> @@ -25,7 +25,7 @@ As a result, only a specific user or group may access it. -This script assumes that the user specified in the pycaosdb.ini +This script assumes that the user specified in the pylinkahead.ini configuration can create new entities. 
""" diff --git a/how-to-merge-after-rename.txt b/how-to-merge-after-rename.txt new file mode 100644 index 0000000000000000000000000000000000000000..f41ed1c575550956cfeffeecd29f945e1fb446d7 --- /dev/null +++ b/how-to-merge-after-rename.txt @@ -0,0 +1,11 @@ +git merge linkahead-rename-step-1 +# resolve potential conflicts and commit +rm -rf src/linkahead +git mv src/caosdb/ src/linkahead +rm -rf src/caosdb +python3 create_slim_linkahead_wrapper.py +git add src +git ci -m "MAINT: rename caosdb to linkahead (module)" +git merge linkahead-rename-step-2 +# resolve potential conflicts and commit +git merge dev diff --git a/setup.py b/setup.py index bfdfa94f4c3e81c2174d722db7aa8192c9df1de8..ce7cd70a626fe552a8ebdf8dbe1ac06247cefcf3 100755 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ # -*- encoding: utf-8 -*- # # -"""caosdb""" +"""linkahead""" import os import subprocess import sys @@ -53,7 +53,7 @@ MICRO = 0 # https://github.com/pypa/packaging/pull/515 # has made it into a release. Probably we should wait for pypa/packaging>=21.4 # https://github.com/pypa/packaging/releases -PRE = "" # "dev" # e.g. rc0, alpha.1, 0.beta-23 +PRE = "" # "dev" # e.g. rc0, alpha.1, 0.beta-23 if PRE: VERSION = "{}.{}.{}-{}".format(MAJOR, MINOR, MICRO, PRE) @@ -91,22 +91,23 @@ def git_version(): def get_version_info(): # Adding the git rev number needs to be done inside write_version_py(), - # otherwise the import of caosdb.version messes up the build under + # otherwise the import of linkahead.version messes up the build under # Python 3. FULLVERSION = VERSION + # Magic which is only really needed in the pipelines. Therefore: a lot of dark pipeline magic. 
if os.path.exists('.git'): GIT_REVISION = git_version() - elif os.path.exists('caosdb_pylib_commit'): - with open('caosdb_pylib_commit', 'r') as f: + elif os.path.exists('linkahead_pylib_commit'): + with open('linkahead_pylib_commit', 'r') as f: GIT_REVISION = f.read().strip() - elif os.path.exists('src/caosdb/version.py'): + elif os.path.exists('src/linkahead/version.py'): # must be a source distribution, use existing version file try: - from caosdb.version import git_revision as GIT_REVISION + from linkahead.version import git_revision as GIT_REVISION except ImportError: raise ImportError("Unable to import git_revision. Try removing " - "src/caosdb/version.py and the build directory " + "src/linkahead/version.py and the build directory " "before building.") else: GIT_REVISION = "Unknown" @@ -117,9 +118,9 @@ def get_version_info(): return FULLVERSION, GIT_REVISION -def write_version_py(filename='src/caosdb/version.py'): +def write_version_py(filename='src/linkahead/version.py'): cnt = """ -# THIS FILE IS GENERATED FROM caosdb SETUP.PY +# THIS FILE IS GENERATED FROM linkahead SETUP.PY # short_version = '%(version)s' version = '%(version)s' @@ -154,14 +155,14 @@ def setup_package(): write_version_py() metadata = dict( - name='caosdb', + name='linkahead', version=get_version_info()[0], - description='Python Interface for CaosDB', + description='Python Interface for LinkAhead', long_description=long_description, long_description_content_type="text/markdown", author='Timm Fitschen', author_email='t.fitschen@indiscale.com', - url='https://www.caosdb.org', + url='https://www.linkahead.org', license="AGPLv3+", classifiers=[ "Programming Language :: Python :: 3", @@ -178,16 +179,19 @@ def setup_package(): "python-dateutil>=2.8.2", 'PyYAML>=5.4.1', 'future', - ], + ], extras_require={'keyring': ['keyring>=13.0.0'], 'jsonschema': ['jsonschema>=4.4.0']}, setup_requires=["pytest-runner>=2.0,<3dev"], tests_require=["pytest", "pytest-cov", "coverage>=4.4.2", 
"jsonschema>=4.4.0"], package_data={ - 'caosdb': ['cert/indiscale.ca.crt', 'schema-pycaosdb-ini.yml'], + 'linkahead': ['cert/indiscale.ca.crt', 'schema-pycaosdb-ini.yml'], }, - scripts=["src/caosdb/utils/caosdb_admin.py"] + scripts=[ + "src/linkahead/utils/caosdb_admin.py", + "src/linkahead/utils/linkahead_admin.py" + ] ) try: setup(**metadata) diff --git a/src/caosdb/__init__.py b/src/caosdb/__init__.py index acf323e860a93753b57f2e104531383b412f3fa0..ee00f5854aacc057c701567cd5ecefc9c1d47267 100644 --- a/src/caosdb/__init__.py +++ b/src/caosdb/__init__.py @@ -1,59 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""CaosDB Python bindings. +from linkahead import * +from warnings import warn -Tries to read from the inifile specified in the environment variable `PYCAOSDBINI` or alternatively -in `~/.pycaosdb.ini` upon import. After that, the ini file `pycaosdb.ini` in the current working -directory will be read additionally, if it exists. 
-""" - -from os import environ, getcwd -# Import of the connection function (which is used to connect to the DB): -from os.path import expanduser, join - -# Import of convenience methods: -import caosdb.apiutils -from caosdb.common import administration -from caosdb.common.datatype import (BOOLEAN, DATETIME, DOUBLE, FILE, INTEGER, - LIST, REFERENCE, TEXT) -from caosdb.common.state import State, Transition -# Import of the basic API classes: -from caosdb.common.models import (ACL, ALL, FIX, NONE, OBLIGATORY, RECOMMENDED, - SUGGESTED, Container, DropOffBox, Entity, - File, Info, Message, Permissions, Property, - Query, QueryTemplate, Record, RecordType, - delete, execute_query, get_global_acl, - get_known_permissions, raise_errors) -from caosdb.utils.get_entity import get_entity_by_name, get_entity_by_path, get_entity_by_id -from caosdb.configuration import _read_config_files, configure, get_config -from caosdb.connection.connection import configure_connection, get_connection -from caosdb.exceptions import * -try: - from caosdb.version import version as __version__ -except ModuleNotFoundError: - version = "uninstalled" - __version__ = version - -_read_config_files() +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/apiutils.py b/src/caosdb/apiutils.py index a46e30375b924d358448e73aece61562c36c700b..4f49a37b0c67b61e657a47d9a9ab62b4b0ad987c 100644 --- a/src/caosdb/apiutils.py +++ b/src/caosdb/apiutils.py @@ -1,591 +1,6 @@ -# -*- coding: utf-8 -*- -# -# This file is a part of the CaosDB Project. 
-# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> -# Copyright (C) 2020-2022 IndiScale GmbH <info@indiscale.com> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""API-Utils: Some simplified functions for generation of records etc. -""" +from linkahead.apiutils import * +from warnings import warn -import logging -import warnings - -from collections.abc import Iterable -from typing import Any, Dict, List - -from caosdb.common.datatype import is_reference -from caosdb.common.models import (Container, Entity, File, Property, - Record, RecordType, execute_query, - SPECIAL_ATTRIBUTES) -from caosdb.exceptions import CaosDBException - -from caosdb.utils.git_utils import (get_origin_url_in, get_diff_in, - get_branch_in, get_commit_in) - -logger = logging.getLogger(__name__) - - -class EntityMergeConflictError(CaosDBException): - """An error that is raised in case of an unresolvable conflict when merging - two entities. - """ - - -def new_record(record_type, name=None, description=None, - tempid=None, insert=False, **kwargs): - """Function to simplify the creation of Records. - - record_type: The name of the RecordType to use for this record. - (ids should also work.) - name: Name of the new Record. 
- kwargs: Key-value-pairs for the properties of this Record. - - Returns: The newly created Record. - - Of course this functions requires an open database connection! - """ - - rt = RecordType(name=record_type) - rt.retrieve() - - r = Record(name) - r.add_parent(rt) - - if tempid is not None: - r.id = tempid - - if description is not None: - r.description = description - - # Add all additional properties, treat iterables als multiple - # additions. - - for k, v in kwargs.items(): - if hasattr(v, "encode") or not isinstance(v, Iterable): - v = [v] - - for vv in v: - p = Property(k) - p.retrieve() - p.value = vv - r.add_property(p) - - if insert: - r.insert() - - return r - - -def id_query(ids): - warnings.warn("Please use 'create_id_query', which only creates" - "the string.", DeprecationWarning) - - return execute_query(create_id_query(ids)) - - -def create_id_query(ids): - return "FIND ENTITY WITH " + " OR ".join( - ["ID={}".format(id) for id in ids]) - - -def get_type_of_entity_with(id_): - objs = retrieve_entities_with_ids([id_]) - - if len(objs) == 0: - raise RuntimeError("ID {} not found.".format(id_)) - - if len(objs) > 1: - raise RuntimeError( - "ID {} is not unique. This is probably a bug in the CaosDB server." .format(id_)) - obj = objs[0] - - if isinstance(obj, Record): - return Record - elif isinstance(obj, RecordType): - return RecordType - elif isinstance(obj, Property): - return Property - elif isinstance(obj, File): - return File - elif isinstance(obj, Entity): - return Entity - - -def retrieve_entity_with_id(eid): - return execute_query("FIND ENTITY WITH ID={}".format(eid), unique=True) - - -def retrieve_entities_with_ids(entities): - collection = Container() - step = 20 - - for i in range(len(entities)//step+1): - collection.extend( - execute_query( - create_id_query(entities[i*step:(i+1)*step]))) - - return collection - - -def getOriginUrlIn(folder): - warnings.warn(""" - This function is deprecated and will be removed with the next release. 
- Please use the caosdb.utils.git_utils.get_origin_url_in instead.""", - DeprecationWarning) - return get_origin_url_in(folder) - - -def getDiffIn(folder, save_dir=None): - warnings.warn(""" - This function is deprecated and will be removed with the next release. - Please use the caosdb.utils.git_utils.get_diff_in instead.""", - DeprecationWarning) - return get_diff_in(folder, save_dir) - - -def getBranchIn(folder): - warnings.warn(""" - This function is deprecated and will be removed with the next release. - Please use the caosdb.utils.git_utils.get_branch_in instead.""", - DeprecationWarning) - return get_branch_in(folder) - - -def getCommitIn(folder): - warnings.warn(""" - This function is deprecated and will be removed with the next release. - Please use the caosdb.utils.git_utils.get_commit_in instead.""", - DeprecationWarning) - return get_commit_in(folder) - - -def compare_entities(old_entity: Entity, new_entity: Entity, compare_referenced_records: bool = False): - """Compare two entites. - - Return a tuple of dictionaries, the first index belongs to additional information for old - entity, the second index belongs to additional information for new entity. - - Additional information means in detail: - - Additional parents (a list under key "parents") - - Information about properties: - - Each property lists either an additional property or a property with a changed: - - datatype - - importance or - - value (not implemented yet) - - In case of changed information the value listed under the respective key shows the - value that is stored in the respective entity. - - If `compare_referenced_records` is `True`, also referenced entities will be - compared using this function (which is then called with - `compare_referenced_records = False` to prevent infinite recursion in case - of circular references). 
- - Parameters - ---------- - old_entity, new_entity : Entity - Entities to be compared - compare_referenced_records : bool, optional - Whether to compare referenced records in case of both, `old_entity` and - `new_entity`, have the same reference properties and both have a Record - object as value. If set to `False`, only the corresponding Python - objects are compared which may lead to unexpected behavior when - identical records are stored in different objects. Default is False. - - """ - olddiff: Dict[str, Any] = {"properties": {}, "parents": []} - newdiff: Dict[str, Any] = {"properties": {}, "parents": []} - - if old_entity is new_entity: - return (olddiff, newdiff) - - for attr in SPECIAL_ATTRIBUTES: - try: - oldattr = old_entity.__getattribute__(attr) - old_entity_attr_exists = True - except BaseException: - old_entity_attr_exists = False - try: - newattr = new_entity.__getattribute__(attr) - new_entity_attr_exists = True - except BaseException: - new_entity_attr_exists = False - - if old_entity_attr_exists and (oldattr == "" or oldattr is None): - old_entity_attr_exists = False - - if new_entity_attr_exists and (newattr == "" or newattr is None): - new_entity_attr_exists = False - - if not old_entity_attr_exists and not new_entity_attr_exists: - continue - - if ((old_entity_attr_exists ^ new_entity_attr_exists) - or (oldattr != newattr)): - - if old_entity_attr_exists: - olddiff[attr] = oldattr - - if new_entity_attr_exists: - newdiff[attr] = newattr - - # properties - - for prop in old_entity.properties: - matching = [p for p in new_entity.properties if p.name == prop.name] - - if len(matching) == 0: - olddiff["properties"][prop.name] = {} - elif len(matching) == 1: - newdiff["properties"][prop.name] = {} - olddiff["properties"][prop.name] = {} - - if (old_entity.get_importance(prop.name) != - new_entity.get_importance(prop.name)): - olddiff["properties"][prop.name]["importance"] = \ - old_entity.get_importance(prop.name) - 
newdiff["properties"][prop.name]["importance"] = \ - new_entity.get_importance(prop.name) - - if (prop.datatype != matching[0].datatype): - olddiff["properties"][prop.name]["datatype"] = prop.datatype - newdiff["properties"][prop.name]["datatype"] = \ - matching[0].datatype - - if (prop.unit != matching[0].unit): - olddiff["properties"][prop.name]["unit"] = prop.unit - newdiff["properties"][prop.name]["unit"] = \ - matching[0].unit - - if (prop.value != matching[0].value): - # basic comparison of value objects says they are different - same_value = False - if compare_referenced_records: - # scalar reference - if isinstance(prop.value, Entity) and isinstance(matching[0].value, Entity): - # explicitely not recursive to prevent infinite recursion - same_value = empty_diff( - prop.value, matching[0].value, compare_referenced_records=False) - # list of references - elif isinstance(prop.value, list) and isinstance(matching[0].value, list): - # all elements in both lists actually are entity objects - # TODO: check, whether mixed cases can be allowed or should lead to an error - if all([isinstance(x, Entity) for x in prop.value]) and all([isinstance(x, Entity) for x in matching[0].value]): - # can't be the same if the lengths are different - if len(prop.value) == len(matching[0].value): - # do a one-by-one comparison; the values are the same, if all diffs are empty - same_value = all( - [empty_diff(x, y, False) for x, y in zip(prop.value, matching[0].value)]) - - if not same_value: - olddiff["properties"][prop.name]["value"] = prop.value - newdiff["properties"][prop.name]["value"] = \ - matching[0].value - - if (len(newdiff["properties"][prop.name]) == 0 - and len(olddiff["properties"][prop.name]) == 0): - newdiff["properties"].pop(prop.name) - olddiff["properties"].pop(prop.name) - - else: - raise NotImplementedError( - "Comparison not implemented for multi-properties.") - - for prop in new_entity.properties: - if len([0 for p in old_entity.properties if p.name == 
prop.name]) == 0: - newdiff["properties"][prop.name] = {} - - # parents - - for parent in old_entity.parents: - if len([0 for p in new_entity.parents if p.name == parent.name]) == 0: - olddiff["parents"].append(parent.name) - - for parent in new_entity.parents: - if len([0 for p in old_entity.parents if p.name == parent.name]) == 0: - newdiff["parents"].append(parent.name) - - return (olddiff, newdiff) - - -def empty_diff(old_entity: Entity, new_entity: Entity, compare_referenced_records: bool = False): - """Check whether the `compare_entities` found any differences between - old_entity and new_entity. - - Parameters - ---------- - old_entity, new_entity : Entity - Entities to be compared - compare_referenced_records : bool, optional - Whether to compare referenced records in case of both, `old_entity` and - `new_entity`, have the same reference properties and both have a Record - object as value. - - """ - olddiff, newdiff = compare_entities( - old_entity, new_entity, compare_referenced_records) - for diff in [olddiff, newdiff]: - for key in ["parents", "properties"]: - if len(diff[key]) > 0: - # There is a difference somewhere in the diff - return False - for key in SPECIAL_ATTRIBUTES: - if key in diff and diff[key]: - # There is a difference in at least one special attribute - return False - # all elements of the two diffs were empty - return True - - -def merge_entities(entity_a: Entity, entity_b: Entity, merge_references_with_empty_diffs=True, - force=False): - """Merge entity_b into entity_a such that they have the same parents and properties. - - datatype, unit, value, name and description will only be changed in entity_a - if they are None for entity_a and set for entity_b. If there is a - corresponding value for entity_a different from None, an - EntityMergeConflictError will be raised to inform about an unresolvable merge - conflict. - - The merge operation is done in place. - - Returns entity_a. 
- - WARNING: This function is currently experimental and insufficiently tested. Use with care. - - Parameters - ---------- - entity_a, entity_b : Entity - The entities to be merged. entity_b will be merged into entity_a in place - merge_references_with_empty_diffs : bool, optional - Whether the merge is performed if entity_a and entity_b both reference - record(s) that may be different Python objects but have empty diffs. If - set to `False` a merge conflict will be raised in this case - instead. Default is True. - force : bool, optional - If True, in case `entity_a` and `entity_b` have the same properties, the - values of `entity_a` are replaced by those of `entity_b` in the merge. - If `False`, an EntityMergeConflictError is raised instead. Default is False. - - Returns - ------- - entity_a : Entity - The initial entity_a after the in-place merge - - Raises - ------ - EntityMergeConflictError - In case of an unresolvable merge conflict. - - """ - - logger.warning( - "This function is currently experimental and insufficiently tested. 
Use with care.") - - # Compare both entities: - diff_r1, diff_r2 = compare_entities( - entity_a, entity_b, compare_referenced_records=merge_references_with_empty_diffs) - - # Go through the comparison and try to apply changes to entity_a: - for key in diff_r2["parents"]: - entity_a.add_parent(entity_b.get_parent(key)) - - for key in diff_r2["properties"]: - if key in diff_r1["properties"]: - if ("importance" in diff_r1["properties"][key] and - "importance" in diff_r2["properties"][key]): - if (diff_r1["properties"][key]["importance"] != - diff_r2["properties"][key]["importance"]): - raise NotImplementedError() - elif ("importance" in diff_r1["properties"][key] or - "importance" in diff_r2["properties"][key]): - raise NotImplementedError() - - for attribute in ("datatype", "unit", "value"): - if (attribute in diff_r2["properties"][key] and - diff_r2["properties"][key][attribute] is not None): - if (diff_r1["properties"][key][attribute] is None): - setattr(entity_a.get_property(key), attribute, - diff_r2["properties"][key][attribute]) - elif force: - setattr(entity_a.get_property(key), attribute, - diff_r2["properties"][key][attribute]) - else: - raise EntityMergeConflictError( - f"Entity a ({entity_a.id}, {entity_a.name}) " - f"has a Property '{key}' with {attribute}=" - f"{diff_r2['properties'][key][attribute]}\n" - f"Entity b ({entity_b.id}, {entity_b.name}) " - f"has a Property '{key}' with {attribute}=" - f"{diff_r1['properties'][key][attribute]}") - else: - # TODO: This is a temporary FIX for - # https://gitlab.indiscale.com/caosdb/src/caosdb-pylib/-/issues/105 - entity_a.add_property(id=entity_b.get_property(key).id, - name=entity_b.get_property(key).name, - datatype=entity_b.get_property(key).datatype, - value=entity_b.get_property(key).value, - unit=entity_b.get_property(key).unit, - importance=entity_b.get_importance(key)) - # entity_a.add_property( - # entity_b.get_property(key), - # importance=entity_b.get_importance(key)) - - for special_attribute in 
("name", "description"): - sa_a = getattr(entity_a, special_attribute) - sa_b = getattr(entity_b, special_attribute) - if sa_a != sa_b: - if sa_a is None: - setattr(entity_a, special_attribute, sa_b) - elif force: - # force overwrite - setattr(entity_a, special_attribute, sa_b) - else: - raise EntityMergeConflictError( - f"Conflict in special attribute {special_attribute}:\n" - f"A: {sa_a}\nB: {sa_b}") - return entity_a - - -def describe_diff(olddiff, newdiff, name=None, as_update=True): - description = "" - - for attr in list(set(list(olddiff.keys()) + list(newdiff.keys()))): - if attr == "parents" or attr == "properties": - continue - description += "{} differs:\n".format(attr) - description += "old version: {}\n".format( - olddiff[attr] if attr in olddiff else "not set") - description += "new version: {}\n\n".format( - newdiff[attr] if attr in newdiff else "not set") - - if len(olddiff["parents"]) > 0: - description += ("Parents that are only in the old version:\n" - + ", ".join(olddiff["parents"])) - - if len(newdiff["parents"]) > 0: - description += ("Parents that are only in the new version:\n" - + ", ".join(olddiff["parents"])) - - for prop in list(set(list(olddiff["properties"].keys()) - + list(newdiff["properties"].keys()))): - description += "property {} differs:\n".format(prop) - - if prop not in olddiff["properties"]: - description += "it does not exist in the old version: \n" - elif prop not in newdiff["properties"]: - description += "it does not exist in the new version: \n" - else: - description += "old version: {}\n".format( - olddiff["properties"][prop]) - description += "new version: {}\n\n".format( - newdiff["properties"][prop]) - - if description != "": - description = ("## Difference between the old and the new " - "version of {}\n\n".format(name))+description - - return description - - -def apply_to_ids(entities, func): - """ Apply a function to all ids. 
- - All ids means the ids of the entities themselves but also to all parents, - properties and referenced entities. - - Parameters - ---------- - entities : list of Entity - func : function with one parameter. - """ - - for entity in entities: - _apply_to_ids_of_entity(entity, func) - - -def _apply_to_ids_of_entity(entity, func): - entity.id = func(entity.id) - - for par in entity.parents: - par.id = func(par.id) - - for prop in entity.properties: - prop.id = func(prop.id) - isref = is_reference(prop.datatype) - - if isref: - if isinstance(prop.value, list): - prop.value = [func(el) for el in prop.value] - else: - if prop.value is not None: - prop.value = func(prop.value) - - -def resolve_reference(prop: Property): - """resolves the value of a reference property - - The integer value is replaced with the entity object. - If the property is not a reference, then the function returns without - change. - """ - - if not prop.is_reference(server_retrieval=True): - return - - if isinstance(prop.value, list): - referenced = [] - - for val in prop.value: - if isinstance(val, int): - referenced.append(retrieve_entity_with_id(val)) - else: - referenced.append(val) - prop.value = referenced - else: - if isinstance(prop.value, int): - prop.value = retrieve_entity_with_id(prop.value) - - -def create_flat_list(ent_list: List[Entity], flat: List[Entity]): - """ - Recursively adds all properties contained in entities from ent_list to - the output list flat. Each element will only be added once to the list. - - TODO: Currently this function is also contained in newcrawler module crawl. - We are planning to permanently move it to here. - """ - for ent in ent_list: - for p in ent.properties: - # For lists append each element that is of type Entity to flat: - if isinstance(p.value, list): - for el in p.value: - if isinstance(el, Entity): - if el not in flat: - flat.append(el) - # TODO: move inside if block? 
- create_flat_list([el], flat) - elif isinstance(p.value, Entity): - if p.value not in flat: - flat.append(p.value) - # TODO: move inside if block? - create_flat_list([p.value], flat) +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.apiutils`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/cached.py b/src/caosdb/cached.py index 131526674d7df97d598a6d1bfbc2af7805c63a03..4ac13fc6866324b04f94195f2e447aff2b303069 100644 --- a/src/caosdb/cached.py +++ b/src/caosdb/cached.py @@ -1,184 +1,6 @@ -# -*- coding: utf-8 -*- -# -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2023 IndiScale GmbH <info@indiscale.com> -# Copyright (C) 2023 Henrik tom Wörden <h.tomwoerden@indiscale.com> -# Copyright (C) 2023 Daniel Hornung <d.hornung@indiscale.com> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -""" -This module provides some cached versions of functions that retrieve Entities from a remote server. +from linkahead.cached import * +from warnings import warn -See also -======== - -- ``cache_initialize(...)`` : Re-initialize the cache. -- ``cache_clear()`` : Clear the cache. -- ``cached_query(query)`` : A cached version of ``execute_query(query)``. -- ``cached_get_entity_by(...)`` : Get an Entity by name, id, ... 
-""" - -from enum import Enum -from functools import lru_cache -from typing import Union - -from .utils import get_entity -from .common.models import execute_query, Entity, Container - - -# roughly 1GB for typical entity sizes -DEFAULT_SIZE = 33333 - -# This dict cache is solely for filling the real cache manually (e.g. to reuse older query results) -_DUMMY_CACHE = {} - - -class AccessType(Enum): - """Different access types for cached queries. Needed for filling the cache manually with -:func:`cache_fill` . - - """ - QUERY = 1 - PATH = 2 - EID = 3 - NAME = 4 - - -def cached_get_entity_by(eid: Union[str, int] = None, name: str = None, path: str = None, query: - str = None) -> Entity: - """Return a single entity that is identified uniquely by one argument. - -You must supply exactly one argument. - -If a query phrase is given, the result must be unique. If this is not what you need, use -:func:`cached_query` instead. - - """ - count = 0 - if eid is not None: - count += 1 - if name is not None: - count += 1 - if path is not None: - count += 1 - if query is not None: - count += 1 - if count != 1: - raise ValueError("You must supply exactly one argument.") - - if eid is not None: - return _cached_access(AccessType.EID, eid, unique=True) - if name is not None: - return _cached_access(AccessType.NAME, name, unique=True) - if path is not None: - return _cached_access(AccessType.PATH, path, unique=True) - if query is not None: - return _cached_access(AccessType.QUERY, query, unique=True) - - raise ValueError("Not all arguments may be None.") - - -def cached_query(query_string) -> Container: - """A cached version of :func:`caosdb.execute_query<caosdb.common.models.execute_query>`. - -All additional arguments are at their default values. - - """ - return _cached_access(AccessType.QUERY, query_string, unique=False) - - -@lru_cache(maxsize=DEFAULT_SIZE) -def _cached_access(kind: AccessType, value: Union[str, int], unique=True): - # This is the function that is actually cached. 
- # Due to the arguments, the cache has kind of separate sections for cached_query and - # cached_get_entity_by with the different AccessTypes. However, there is only one cache size. - - # The dummy dict cache is only for filling the cache manually, it is deleted afterwards. - if value in _DUMMY_CACHE: - return _DUMMY_CACHE[value] - - if kind == AccessType.QUERY: - return execute_query(value, unique=unique) - if kind == AccessType.NAME: - return get_entity.get_entity_by_name(value) - if kind == AccessType.EID: - return get_entity.get_entity_by_id(value) - if kind == AccessType.PATH: - return get_entity.get_entity_by_path(value) - - raise ValueError(f"Unknown AccessType: {kind}") - - -def cache_clear() -> None: - """Empty the cache that is used by `cached_query` and `cached_get_entity_by`.""" - _cached_access.cache_clear() - - -def cache_info(): - """Return info about the cache that is used by `cached_query` and `cached_get_entity_by`. - -Returns -------- - -out: named tuple - See the standard library :func:`functools.lru_cache` for details.""" - return _cached_access.cache_info() - - -def cache_initialize(maxsize=DEFAULT_SIZE) -> None: - """Create a new cache with the given size for `cached_query` and `cached_get_entity_by`. - - This implies a call of :func:`cache_clear`, the old cache is emptied. - - """ - cache_clear() - global _cached_access - _cached_access = lru_cache(maxsize=maxsize)(_cached_access.__wrapped__) - - -def cache_fill(items: dict, kind: AccessType = AccessType.EID, unique: bool = True) -> None: - """Add entries to the cache manually. - - This allows to fill the cache without actually submitting queries. Note that this does not - overwrite existing entries with the same keys. - -Parameters ----------- - -items: dict - A dictionary with the entries to go into the cache. The keys must be compatible with the - AccessType given in ``kind`` - -kind: AccessType, optional - The AccessType, for example ID, name, path or query. 
- -unique: bool, optional - If True, fills the cache for :func:`cached_get_entity_by`, presumably with - :class:`caosdb.Entity<caosdb.common.models.Entity>` objects. If False, the cache should be filled - with :class:`caosdb.Container<caosdb.common.models.Container>` objects, for use with - :func:`cached_query`. - - """ - # 1. add the given items to the corresponding dummy dict cache - _DUMMY_CACHE.update(items) - - # 2. call the cache function with each key (this only results in a dict look up) - for key in items.keys(): - _cached_access(kind, key, unique=unique) - - # 3. empty the dummy dict cache again - _DUMMY_CACHE.clear() +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.cached`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/common/__init__.py b/src/caosdb/common/__init__.py index 436281df8077b2cbf357537d36b21567b86ea5a2..3d765c2a092c99c8c0eba62b91e47124d449bac2 100644 --- a/src/caosdb/common/__init__.py +++ b/src/caosdb/common/__init__.py @@ -1 +1,6 @@ -"""Commonly used classes for CaosDB.""" + +from linkahead.common import * +from warnings import warn + +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.common`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/common/administration.py b/src/caosdb/common/administration.py index a27aaf0406c83ac33c37b676a9cdeab812bf2f7a..b44d52b1587efad10949de3430f1e8063a191821 100644 --- a/src/caosdb/common/administration.py +++ b/src/caosdb/common/administration.py @@ -1,439 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. 
-# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> -# Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""missing docstring.""" +from linkahead.common.administration import * +from warnings import warn -import re -import string -import random -from caosdb.common.utils import xml2str -from caosdb.connection.connection import get_connection -from caosdb.exceptions import (EntityDoesNotExistError, HTTPClientError, - HTTPForbiddenError, HTTPResourceNotFoundError, - ServerConfigurationException) -from lxml import etree - - -def set_server_property(key, value): - """set_server_property. - - Set a server property. - - Parameters - ---------- - key : str - The name of the server property. - value : str - The value of the server property. - - - Returns - ------- - None - """ - con = get_connection() - try: - con._form_data_request(method="POST", path="_server_properties", - params={key: value}).read() - except EntityDoesNotExistError: - raise ServerConfigurationException( - "Debug mode in server is probably disabled.") from None - - -def get_server_properties(): - """get_server_properties. - - Get all server properties as a dict. 
- - Returns - ------- - dict - The server properties. - """ - con = get_connection() - try: - body = con._http_request( - method="GET", path="_server_properties") - except EntityDoesNotExistError: - raise ServerConfigurationException( - "Debug mode in server is probably disabled.") from None - - xml = etree.parse(body) - props = dict() - - for elem in xml.getroot(): - props[elem.tag] = elem.text - - return props - - -def get_server_property(key): - """get_server_property. - - Get a server property. - - Parameters - ---------- - key : str - The name of the server property - - Returns - ------- - value : str - The string value of the server property. - - Raises - ------ - KeyError - If the server property is no defined. - """ - - return get_server_properties()[key] - - -def generate_password(length: int): - """Create a random password that fulfills the security requirements - - Parameters - ---------- - length : int - Length of the generated password. Has to be greater than 7. - - Returns - ------- - password : string - Generated random password of the given length - - Raises - ------ - ValueError: - If the length is less than 8. - """ - minimum_password_length = 8 - if length < minimum_password_length: - raise ValueError("CaosDB passwords have to be at least {} characters.".format( - minimum_password_length)) - sample_letters = string.ascii_letters + string.digits + "!#$%*+-/:;?_" - password = ''.join((random.choice(sample_letters) for i in range(length))) - - while not re.match(r"(?=.*[A-Z])(?=.*[a-z])(?=.*\d)(?=.*[\W_]).{8,}", - password): - password = ''.join((random.choice(sample_letters) - for i in range(length))) - - return password - - -def _retrieve_user(name, realm=None, **kwargs): - con = get_connection() - try: - return con._http_request(method="GET", path="User/" + (realm + "/" + name if realm is not None else name), **kwargs).read() - except HTTPForbiddenError as e: - e.msg = "You are not permitted to retrieve this user." 
- raise - except HTTPResourceNotFoundError as e: - e.msg = "User does not exist." - raise - - -def _delete_user(name, **kwargs): - con = get_connection() - try: - return con._http_request(method="DELETE", path="User/" + name, **kwargs).read() - except HTTPForbiddenError as e: - e.msg = "You are not permitted to delete this user." - raise - except HTTPResourceNotFoundError as e: - e.msg = "User does not exist." - raise - - -def _update_user(name, realm=None, password=None, status=None, - email=None, entity=None, **kwargs): - con = get_connection() - params = {} - - if password is not None: - params["password"] = password - - if status is not None: - params["status"] = status - - if email is not None: - params["email"] = email - - if entity is not None: - params["entity"] = str(entity) - try: - return con.put_form_data(entity_uri_segment="User/" + (realm + "/" + name if realm is not None else name), params=params, **kwargs).read() - except HTTPResourceNotFoundError as e: - e.msg = "User does not exist." - raise e - except HTTPForbiddenError as e: - e.msg = "You are not permitted to update this user." - raise e - except HTTPClientError as e: - for elem in etree.fromstring(e.body): - if elem.tag == "Error": - e.msg = elem.get("description") - raise - - -def _insert_user(name, password=None, status=None, email=None, entity=None, **kwargs): - con = get_connection() - params = {"username": name} - - if password is not None: - params["password"] = password - - if status is not None: - params["status"] = status - - if email is not None: - params["email"] = email - - if entity is not None: - params["entity"] = entity - try: - return con.post_form_data(entity_uri_segment="User", params=params, **kwargs).read() - except HTTPForbiddenError as e: - e.msg = "You are not permitted to insert a new user." 
- raise e - except HTTPClientError as e: - for elem in etree.fromstring(e.body): - if elem.tag == "Error": - e.msg = elem.get("description") - raise e - - -def _insert_role(name, description, **kwargs): - con = get_connection() - try: - return con.post_form_data(entity_uri_segment="Role", params={"role_name": name, "role_description": description}, **kwargs).read() - except HTTPForbiddenError as e: - e.msg = "You are not permitted to insert a new role." - raise - except HTTPClientError as e: - if e.status == 409: - e.msg = "Role name is already in use. Choose a different name." - raise - - -def _update_role(name, description, **kwargs): - con = get_connection() - try: - return con.put_form_data(entity_uri_segment="Role/" + name, params={"role_description": description}, **kwargs).read() - except HTTPForbiddenError as e: - e.msg = "You are not permitted to update this role." - raise - except HTTPResourceNotFoundError as e: - e.msg = "Role does not exist." - raise - - -def _retrieve_role(name, **kwargs): - con = get_connection() - try: - return con._http_request(method="GET", path="Role/" + name, **kwargs).read() - except HTTPForbiddenError as e: - e.msg = "You are not permitted to retrieve this role." - raise - except HTTPResourceNotFoundError as e: - e.msg = "Role does not exist." - raise - - -def _delete_role(name, **kwargs): - con = get_connection() - try: - return con._http_request(method="DELETE", path="Role/" + name, **kwargs).read() - except HTTPForbiddenError as e: - e.msg = "You are not permitted to delete this role." - raise - except HTTPResourceNotFoundError as e: - e.msg = "Role does not exist." 
- raise - - -def _set_roles(username, roles, realm=None, **kwargs): - xml = etree.Element("Roles") - - for r in roles: - xml.append(etree.Element("Role", name=r)) - - body = xml2str(xml) - con = get_connection() - try: - body = con._http_request(method="PUT", path="UserRoles/" + (realm + "/" + - username if realm is not None else username), body=body, **kwargs).read() - except HTTPForbiddenError as e: - e.msg = "You are not permitted to set this user's roles." - raise - except HTTPResourceNotFoundError as e: - e.msg = "User does not exist." - raise - except HTTPClientError as e: - if e.status == 409: - e.msg = "Role does not exist." - raise - ret = set() - - for r in etree.fromstring(body)[0]: - if r.tag == "Role": - ret.add(r.get("name")) - - return ret - - -def _get_roles(username, realm=None, **kwargs): - con = get_connection() - try: - body = con._http_request(method="GET", path="UserRoles/" + ( - realm + "/" + username if realm is not None else username), **kwargs).read() - except HTTPForbiddenError as e: - e.msg = "You are not permitted to retrieve this user's roles." - raise - except HTTPResourceNotFoundError as e: - e.msg = "User does not exist." - raise - ret = set() - - for r in etree.fromstring(body).xpath('/Response/Roles')[0]: - if r.tag == "Role": - ret.add(r.get("name")) - - return ret - - -def _set_permissions(role, permission_rules, **kwargs): - """Set permissions for a role. - -Parameters ----------- - -role : str - The role for which the permissions are set. - -permission_rules : iterable<PermissionRule> - An iterable with PermissionRule objects. - -**kwargs : - Additional arguments which are passed to the HTTP request. 
- -Returns -------- - None - """ - xml = etree.Element("PermissionRules") - - for p in permission_rules: - xml.append(p._to_xml()) - - body = xml2str(xml) - con = get_connection() - try: - return con._http_request(method="PUT", path="PermissionRules/" + role, body=body, **kwargs).read() - except HTTPForbiddenError as e: - e.msg = "You are not permitted to set this role's permissions." - raise - except HTTPResourceNotFoundError as e: - e.msg = "Role does not exist." - raise - - -def _get_permissions(role, **kwargs): - con = get_connection() - try: - return PermissionRule._parse_body(con._http_request(method="GET", path="PermissionRules/" + role, **kwargs).read()) - except HTTPForbiddenError as e: - e.msg = "You are not permitted to retrieve this role's permissions." - raise - except HTTPResourceNotFoundError as e: - e.msg = "Role does not exist." - raise - - -class PermissionRule(): - """Permission rules. - -Parameters ----------- -action : str - Either "grant" or "deny" - -permission : str - For example ``RETRIEVE:*``. - -priority : bool, optional - Whether the priority shall be set, defaults is False. 
- """ - - @staticmethod - def _parse_boolean(bstr): - return str(bstr) in ["True", "true", "TRUE", "yes"] - - def __init__(self, action, permission, priority=False): - self._action = action - self._permission = permission - self._priority = PermissionRule._parse_boolean(priority) - - def _to_xml(self): - xml = etree.Element(self._action) - xml.set("permission", self._permission) - - if self._priority is True: - xml.set("priority", "true") - - return xml - - @staticmethod - def _parse_element(elem): - return PermissionRule(elem.tag, elem.get( - "permission"), elem.get("priority")) - - @staticmethod - def _parse_body(body): - xml = etree.fromstring(body) - ret = set() - - for c in xml: - if c.tag in ["Grant", "Deny"]: - ret.add(PermissionRule._parse_element(c)) - - return ret - - def __str__(self): - return str(self._action) + "(" + str(self._permission) + ")" + \ - ("P" if self._priority is True else "") - - def __repr__(self): - return str(self) - - def __hash__(self): - return hash(str(self).lower()) - - def __eq__(self, other): - return str(other).lower() == str(self).lower() +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.common.administration`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/common/datatype.py b/src/caosdb/common/datatype.py index 03ff6d023ab0d3005c37d56c65353c1a1072518e..7c61728aba608444ba172df596c0bc3d48c6a89e 100644 --- a/src/caosdb/common/datatype.py +++ b/src/caosdb/common/datatype.py @@ -1,169 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. 
-# -# Copyright (C) 2020 IndiScale GmbH -# Copyright (C) 2020 Henrik tom Wörden, IndiScale GmbH -# Copyright (C) 2020 Daniel Hornung (d.hornung@indiscale.com) -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -import re +from linkahead.common.datatype import * +from warnings import warn -from ..exceptions import EmptyUniqueQueryError, QueryNotUniqueError - -DOUBLE = "DOUBLE" -REFERENCE = "REFERENCE" -TEXT = "TEXT" -DATETIME = "DATETIME" -INTEGER = "INTEGER" -FILE = "FILE" -BOOLEAN = "BOOLEAN" - - -def LIST(datatype): - if hasattr(datatype, "name"): - datatype = datatype.name - - return "LIST<" + str(datatype) + ">" - - -def get_list_datatype(datatype): - """ returns the datatype of the elements in the list """ - if not isinstance(datatype, str): - return None - match = re.match("LIST(<|<)(?P<datatype>.*)(>|>)", datatype) - - if match is not None: - return match.group("datatype") - else: - return None - - -def is_list_datatype(datatype): - """ returns whether the datatype is a list """ - - return get_list_datatype(datatype) is not None - - -def is_reference(datatype): - """Returns whether the value is a reference - - FILE and REFERENCE properties are examples, but also datatypes that are - RecordTypes. - - Parameters - ---------- - datatype : str - The datatype to check. 
- - Returns - ------- - bool - True if the datatype is a not base datatype or a list of a base datatype. - Otherwise False is returned. - """ - - if datatype is None: - raise ValueError("Cannot decide whether datatype is reference if None" - " is supplied") - - if datatype in [DOUBLE, BOOLEAN, INTEGER, TEXT, DATETIME]: - return False - elif is_list_datatype(datatype): - return is_reference(get_list_datatype(datatype)) - else: - return True - - -def get_referenced_recordtype(datatype): - """Return the record type of the referenced datatype. - - Raises - ------ - ValueError - In cases where datatype is not a reference, the list does not have - a referenced record type or the datatype is a FILE. - - Parameters - ---------- - datatype : str - The datatype to check. - - Returns - ------- - str - String containing the name of the referenced datatype. - """ - - if not is_reference(datatype): - raise ValueError("datatype must be a reference") - - if is_list_datatype(datatype): - datatype = get_list_datatype(datatype) - if datatype is None: - raise ValueError("list does not have a list datatype") - - if datatype == FILE: - raise ValueError( - "FILE references are not considered references with a record type") - - return datatype - - -def get_id_of_datatype(datatype): - """ returns the id of a Record Type - - This is not trivial, as queries may also return children. A check comparing - names is necessary. - - Parameters - ---------- - datatype : string - A datatype, e.g. DOUBLE, or LIST<Person> - - Returns - ------- - The id of the RecordType with the same name as the datatype. - - Raises - ------ - QueryNotUniqueError - If there are more than one entities with the same name as the datatype. - EmptyUniqueQueryError - If there is no entity with the name of the datatype. 
- """ - from caosdb import execute_query - - if is_list_datatype(datatype): - datatype = get_list_datatype(datatype) - q = "FIND RECORDTYPE {}".format(datatype) - - # we cannot use unique=True here, because there might be subtypes - res = execute_query(q) - res = [el for el in res if el.name.lower() == datatype.lower()] - - if len(res) > 1: - raise QueryNotUniqueError( - "Name {} did not lead to unique result; Missing " - "implementation".format(datatype)) - elif len(res) != 1: - raise EmptyUniqueQueryError( - "No RecordType named {}".format(datatype)) - - return res[0].id +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.common.datatype`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/common/models.py b/src/caosdb/common/models.py index 18175043d842eca784cc353119c1cf796d4793d5..4889685d487dcdd7642b8f573091a754a8bc95ce 100644 --- a/src/caosdb/common/models.py +++ b/src/caosdb/common/models.py @@ -1,4867 +1,6 @@ -# -*- coding: utf-8 -*- -# -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# Copyright (C) 2020-2023 Indiscale GmbH <info@indiscale.com> -# Copyright (C) 2020-2023 Florian Spreckelsen <f.spreckelsen@indiscale.com> -# Copyright (C) 2020-2022 Timm Fitschen <t.fitschen@indiscale.com> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. 
-# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# -""" -Collection of the central classes of the CaosDB client, namely the Entity class -and all of its subclasses and the Container class which is used to carry out -transactions. - -All additional classes are either important for the entities or the -transactions. -""" - -from __future__ import annotations # Can be removed with 3.10. -from __future__ import print_function, unicode_literals - -import re -import sys -from builtins import str -from copy import deepcopy -from functools import cmp_to_key -from hashlib import sha512 -from os import listdir -from os.path import isdir -from random import randint -from tempfile import NamedTemporaryFile -from typing import Any, Optional +from linkahead.common.models import * from warnings import warn -from caosdb.common.datatype import (BOOLEAN, DATETIME, DOUBLE, INTEGER, TEXT, - is_list_datatype, is_reference) -from caosdb.common.state import State -from caosdb.common.timezone import TimeZone -from caosdb.common.utils import uuid, xml2str -from caosdb.common.versioning import Version -from caosdb.configuration import get_config -from caosdb.connection.connection import get_connection -from caosdb.connection.encode import MultipartParam, multipart_encode -from caosdb.exceptions import (AmbiguousEntityError, AuthorizationError, - CaosDBConnectionError, CaosDBException, - ConsistencyError, EmptyUniqueQueryError, - EntityDoesNotExistError, EntityError, - EntityHasNoDatatypeError, HTTPURITooLongError, - MismatchingEntitiesError, QueryNotUniqueError, - TransactionError, UniqueNamesError, - UnqualifiedParentsError, PagingConsistencyError, - UnqualifiedPropertiesError) -from lxml import etree - -_ENTITY_URI_SEGMENT = "Entity" - -# importances/inheritance -OBLIGATORY = "OBLIGATORY" -SUGGESTED = "SUGGESTED" -RECOMMENDED = "RECOMMENDED" -FIX = "FIX" -ALL = "ALL" -NONE = "NONE" - 
- -SPECIAL_ATTRIBUTES = ["name", "role", "datatype", "description", - "id", "path", "checksum", "size"] - - -class Entity: - - """Entity is a generic CaosDB object. - - The majority of all methods of the derived classes (e.g. Record, - RecordType, Property ...) are defined here, e.g. add_property, - add_parent, retrieve ... Each entity may have some attributes (id, - name, description, ...), a set of properties, a set of parent - entities and a set of messages which are generated through the - processing in the client library or the server, or which can be used - by the user to control several server-side plug-ins. - """ - - def __init__(self, name=None, id=None, description=None, # @ReservedAssignment - datatype=None, value=None, **kwargs): - self.__role = kwargs["role"] if "role" in kwargs else None - self._checksum = None - self._size = None - self._upload = None - # If an entity is used (e.g. as parent), it is wrapped instead of being used directly. - # see Entity._wrap() - self._wrapped_entity = None - self._version = None - self._cuid = None - self._flags = dict() - self.__value = None - self.__datatype = None - self.datatype = datatype - self.value = value - self.messages = Messages() - self.properties = _Properties() - self.parents = _ParentList() - self.path = None - self.file = None - self.unit = None - self.acl = None - self.permissions = None - self.is_valid = lambda: False - self.is_deleted = lambda: False - self.name = name - self.description = description - self.id = id - self.state = None - - def copy(self): - """ - Return a copy of entity. - - If deep == True return a deep copy, recursively copying all sub entities. - - Standard properties are copied using add_property. - Special attributes, as defined by the global variable SPECIAL_ATTRIBUTES and additionaly - the "value" are copied using setattr. 
- """ - if self.role == "File": - new = File() - elif self.role == "Property": - new = Property() - elif self.role == "RecordType": - new = RecordType() - elif self.role == "Record": - new = Record() - elif self.role == "Entity": - new = Entity() - else: - raise RuntimeError("Unkonwn role.") - - # Copy special attributes: - # TODO: this might rise an exception when copying - # special file attributes like checksum and size. - for attribute in SPECIAL_ATTRIBUTES + ["value"]: - val = getattr(self, attribute) - if val is not None: - setattr(new, attribute, val) - - # Copy parents: - for p in self.parents: - new.add_parent(p) - - # Copy properties: - for p in self.properties: - new.add_property(p, importance=self.get_importance(p)) - - return new - - @property - def version(self): - if self._version is not None or self._wrapped_entity is None: - return self._version - - return self._wrapped_entity.version - - @version.setter - def version(self, version): - self._version = version - - @property - def role(self): - return self.__role - - @role.setter - def role(self, role): - if role is not None and role.lower() == "entity": - self.__role = None - else: - self.__role = role - - @property - def size(self): - if self._size is not None: - return int(self._size) - - if self._wrapped_entity is None: - return None - - return self._wrapped_entity.size - - @property - def id(self): - if self.__id is not None: - return self.__id - - if self._wrapped_entity is None: - return None - - return self._wrapped_entity.id - - @id.setter - def id(self, new_id): - if new_id is not None: - self.__id = int(new_id) - else: - self.__id = None - - @property - def name(self): - if self.__name is not None or self._wrapped_entity is None: - return self.__name - - return self._wrapped_entity.name - - @name.setter - def name(self, new_name): - self.__name = new_name - - @property - def datatype(self): - if self.__datatype is not None or self._wrapped_entity is None: - return self.__datatype - - 
return self._wrapped_entity.datatype - - @datatype.setter - def datatype(self, new_type): - # re-parse value - self.__value = _parse_value(new_type, self.__value) - self.__datatype = new_type - - @property - def description(self): - if self.__description is not None or self._wrapped_entity is None: - return self.__description - - return self._wrapped_entity.description - - @property - def checksum(self): - return self._checksum - - @description.setter - def description(self, new_description): - self.__description = new_description - - @property - def unit(self): - if self.__unit is not None or self._wrapped_entity is None: - return self.__unit - - return self._wrapped_entity.unit - - @unit.setter - def unit(self, new_unit): - self.__unit = new_unit - - @property - def value(self): - if self.__value is not None or self._wrapped_entity is None: - return self.__value - - return self._wrapped_entity.value - - @value.setter - def value(self, new_value): - self.__value = _parse_value(self.datatype, new_value) - - @property - def path(self): - if self.__path is not None or self._wrapped_entity is None: - return self.__path - - return self._wrapped_entity.path - - @path.setter - def path(self, new_path): - self.__path = new_path - - @property - def thumbnail(self): - if self.__thumbnail is not None or self._wrapped_entity is None: - return self.__thumbnail - - return self._wrapped_entity.thumbnail - - @thumbnail.setter - def thumbnail(self, new_thumbnail): - self.__thumbnail = new_thumbnail - - @property - def file(self): - if self.__file is not None or self._wrapped_entity is None: - return self.__file - - return self._wrapped_entity.file - - @file.setter - def file(self, new_file): - self.__file = new_file - - @property - def pickup(self): - if self.__pickup is not None or self._wrapped_entity is None: - return self.__pickup - - return self._wrapped_entity.pickup - - @pickup.setter - def pickup(self, new_pickup): - self.__pickup = new_pickup - - def grant(self, 
realm=None, username=None, role=None, - permission=None, priority=False, revoke_denial=True): - """Grant a permission to a user or role for this entity. - - You must specify either only the username and the realm, or only the - role. - - By default a previously existing denial rule would be revoked, because - otherwise this grant wouldn't have any effect. However, for keeping - contradicting rules pass revoke_denial=False. - - Parameters - ---------- - permission: str - The permission to be granted. - username : str, optional - The username. Exactly one is required, either the `username` or the - `role`. - realm: str, optional - The user's realm. Required when username is not None. - role: str, optional - The role (as in Role-Based Access Control). Exactly one is - required, either the `username` or the `role`. - priority: bool, default False - Whether this permission is granted with priority over non-priority - rules. - revoke_denial: bool, default True - Whether a contradicting denial (with same priority flag) in this - ACL will be revoked. - """ - # @review Florian Spreckelsen 2022-03-17 - self.acl.grant(realm=realm, username=username, role=role, - permission=permission, priority=priority, - revoke_denial=revoke_denial) - - def deny(self, realm=None, username=None, role=None, - permission=None, priority=False, revoke_grant=True): - """Deny a permission to a user or role for this entity. - - You must specify either only the username and the realm, or only the - role. - - By default a previously existing grant rule would be revoked, because - otherwise this denial would override the grant rules anyways. However, - for keeping contradicting rules pass revoke_grant=False. - - Parameters - ---------- - permission: str - The permission to be denied. - username : str, optional - The username. Exactly one is required, either the `username` or the - `role`. - realm: str, optional - The user's realm. Required when username is not None. 
- role: str, optional - The role (as in Role-Based Access Control). Exactly one is - required, either the `username` or the `role`. - priority: bool, default False - Whether this permission is denied with priority over non-priority - rules. - revoke_grant: bool, default True - Whether a contradicting grant (with same priority flag) in this - ACL will be revoked. - """ - # @review Florian Spreckelsen 2022-03-17 - self.acl.deny(realm=realm, username=username, role=role, - permission=permission, priority=priority, - revoke_grant=revoke_grant) - - def revoke_denial(self, realm=None, username=None, - role=None, permission=None, priority=False): - self.acl.revoke_denial( - realm=realm, - username=username, - role=role, - permission=permission, - priority=priority) - - def revoke_grant(self, realm=None, username=None, - role=None, permission=None, priority=False): - self.acl.revoke_grant( - realm=realm, - username=username, - role=role, - permission=permission, - priority=priority) - - def is_permitted(self, permission, role=None): - if role is None: - # pylint: disable=unsupported-membership-test - - return permission in self.permissions - else: - self.acl.is_permitted(permission=permission) - - def get_all_messages(self): - ret = Messages() - ret.append(self.messages) - - for p in self.properties: - ret.extend(p.get_all_messages()) - - for p in self.parents: - ret.extend(p.get_all_messages()) - - return ret - - def clear_server_messages(self): - self.messages.clear_server_messages() - - for p in self.properties: - p.clear_server_messages() - - for p in self.parents: - p.clear_server_messages() - - return self - - def get_importance(self, property): # @ReservedAssignment - """Get the importance of a given property regarding this entity.""" - - if self.properties is not None: - return self.properties.get_importance(property) - - def remove_property(self, property): # @ReservedAssignment - self.properties.remove(property) - - return self - - def 
remove_value_from_property(self, property_name: str, value: Any, - remove_if_empty_afterwards: Optional[bool] = True): - """Remove a value from a property given by name. - - Do nothing if this entity does not have a property of this - ``property_name`` or if the property value is different of the given - ``value``. By default, the property is removed from this entity if it - becomes empty (i.e., value=None) through removal of the value. This - behavior can be changed by setting ``remove_if_empty_afterwards`` to - ``False`` in which case the property remains. - - Notes - ----- - If the property value is a list and the value to be removed occurs more - than once in this list, only its first occurrance is deleted (similar - to the behavior of Python's ``list.remove()``.) - - If the property was empty (prop.value == None) before, the property is - not removed afterwards even if ``remove_if_empty_afterwards`` is set to - ``True``. Rationale: the property being empty is not an effect of - calling this function. - - Parameters - ---------- - property_name : str - Name of the property from which the ``value`` will be removed. - - value - Value that is to be removed. - - remove_if_empty_afterwards : bool, optional - Whether the property shall be removed from this entity if it is - emptied by removing the ``value``. Default is ``True``. - - Returns - ------- - self - This entity. 
- - """ - - if self.get_property(property_name) is None: - return self - if self.get_property(property_name).value is None: - remove_if_empty_afterwards = False - empty_afterwards = False - if isinstance(self.get_property(property_name).value, list): - if value in self.get_property(property_name).value: - self.get_property(property_name).value.remove(value) - if self.get_property(property_name).value == []: - self.get_property(property_name).value = None - empty_afterwards = True - elif self.get_property(property_name).value == value: - self.get_property(property_name).value = None - empty_afterwards = True - if remove_if_empty_afterwards and empty_afterwards: - self.remove_property(property_name) - - return self - - def remove_parent(self, parent): - self.parents.remove(parent) - - return self - - def add_property(self, property=None, value=None, id=None, name=None, description=None, datatype=None, - unit=None, importance=None, inheritance=None): # @ReservedAssignment - """Add a property to this entity. - - The first parameter is meant to identify the property entity either via - its id or name, or by providing the corresponding ``Entity`` Python - object. The second parameter is the value of the new property. Any other - named parameter may be passed by means of the keywwords. Accepted - keywords are: id, name, description, importance, inheritance, datatype, - and unit. - - Notes - ----- - If you want to add a property to an already existing entity, the - property ``id`` of that property needs to be specified before you send - the updated entity to the server. - - Parameters - ---------- - property : int, str, Entity, optional - An identifier for the property to be added, either its name, its id, - or the corresponding Entity Python object. If ``None``, either the - `name` or the `id` argument have to be specified explicitly. Default - is ``None``. - value : int, str, bool, datetime, Entity, or list of these types, optional - The value of the new property. 
In case of a reference to another - entity, this value may be the referenced entities id or the - ``Entity`` as a Python object. Default is None. - id : int, optional - Id of the property, by default None - name : str, optional - Name of the property, by default None - description : str, optional - Description of the property, by default None - datatype : str, optional - Datatype of the property, by default None - unit : str, optional - Unit of the property, by default None - importance :str, optional - Importance of the property, by default None - inheritance : str, optional - Inheritance of the property, by default None - - Returns - ------- - Entity - This Entity object to which the new property has been added. - - Warns - ----- - UserWarning - If the first parameter is None then id or name must be defined and not be None. - UserWarning - If the first parameter is an integer then it is interpreted as the id and id must be - undefined or None. - UserWarning - If the first parameter is not None and neither an instance of Entity nor an integer it is - interpreted as the name and name must be undefined or None. - - Raises - ------ - ValueError: - If you try to add an ``Entity`` object with File or Record role (or, - equivalently, a ``File`` or ``Record`` object) as a property, a - ``ValueError`` is raised. 
- - Examples - -------- - Add a simple integer property with the name ``TestProp`` and the value - 27 to a Record: - - >>> import caosdb as db - >>> rec = db.Record(name="TestRec").add_parent(name="TestType") - >>> rec.add_property("TestProp", value=27) # specified by name, you could equally use the property's id if it is known - - You can also use the Python object: - - >>> prop = db.Property(name="TestProp", datatype=db.INTEGER) - >>> rec.add_property(prop, value=27) # specified via the Python object - - In case of updating an existing Record, the Property needs to be - specified by id: - - >>> rec = db.Record(name="TestRec").retrieve() - >>> prop2 = db.Property(name="OtherTestProp").retrieve() - >>> rec.add_property(id=prop2.id, value="My new value") - >>> rec.update() - - Let's look at the more advanced example of adding a list of integers as - value of the above integer ``TestProp``: - - >>> rec.add_property("TestProp", value=[27,28,29], datatype=db.LIST(db.INTEGER)) - - Note that since `TestProp` is a scalar integer Property, the datatype - `LIST<INTEGER>` has to be specified explicitly. - - Finally, we can also add reference properties, specified by the RecordType of the referenced entity. - - >>> ref_rec = db.Record(name="ReferencedRecord").add_parent(name="OtherRT") - >>> rec.add_property(name="OtherRT", value=ref_rec) # or value=ref_rec.id if ref_rec has one set by the server - - See more on adding properties and inserting data in - https://docs.indiscale.com/caosdb-pylib/tutorials/Data-Insertion.html. - - """ - - pid = id - abstract_property = None - - if isinstance(property, Entity): - if property.role is not None and property.role.lower() in ["record", "file"]: - raise ValueError("The property parameter is a {0}. This " - "is very unusual and probably not what you " - "want. 
Otherwise, construct a property from " - "a {0} using the Property class and add " - "that to this entity.".format(property.role)) - abstract_property = property - elif isinstance(property, int): - if pid is not None: - raise UserWarning("The first parameter was an integer which would normally be interpreted as the id of the property which is to be added. But you have also specified a parameter 'id' in the method call. This is ambiguous and cannot be processed.") - pid = property - id = pid - elif property is not None: - if name is not None: - raise UserWarning("The first parameter was neither an instance of Entity nor an integer. Therefore the string representation of your first parameter would normally be interpreted name of the property which is to be added. But you have also specified a parameter 'name' in the method call. This is ambiguous and cannot be processed.") - name = str(property) - - if property is None and name is None and pid is None: - raise UserWarning( - "This method expects you to pass at least an entity, a name or an id.") - - new_property = Property(name=name, id=id, description=description, datatype=datatype, - value=value, unit=unit) - - if abstract_property is not None: - new_property._wrap(abstract_property) - - # FIXME: this really necessary? - - if new_property.datatype is None and isinstance( - property, (RecordType, Record, File)): - new_property.datatype = property - new_property.value = value - - self.properties.append( - property=new_property, importance=importance, inheritance=inheritance) - - return self - - def add_message(self, msg=None, type=None, code=None, # @ReservedAssignment - description=None, body=None): - """Add a message (msg) to this entity. If and only if no msg is given - this method will created a new message from the parameters type, code, - description, and body. - - @param msg: The message to be added to this entity. - @param type: The type of the message to be added. 
- @param code: The code of the message to be added. - @param description: The description of the message to be added. - @param body: The body of the message to be added. - """ - - if msg is not None: - pass - else: - msg = Message(description=description, type=type, code=code, - body=body) - self.messages.append(msg) - - return self - - def add_parent(self, parent=None, id=None, name=None, inheritance=None): # @ReservedAssignment - """Add a parent to this entity. - - Parameters - ---------- - parent : Entity or int or str or None - The parent entity, either specified by the Entity object - itself, or its id or its name. Default is None. - id : int - Integer id of the parent entity. Ignored if `parent` - is not None. - name : str - Name of the parent entity. Ignored if `parent is not - none`. - inheritance : str - One of ``obligatory``, ``recommended``, ``suggested``, or ``fix``. Specifies the - minimum importance which parent properties need to have to be inherited by this - entity. If no `inheritance` is given, no properties will be inherited by the child. - This parameter is case-insensitive. - - Notes - ----- - Note that the behaviour of the `inheritance` argument currently has not - yet been specified when assigning parents to Records, it only works for - inheritance of RecordTypes (and Properties). For more information, it is - recommended to look into the :ref:`data insertion - tutorial<tutorial-inheritance-properties>`. - - Raises - ------ - UserWarning - If neither a `parent` parameter, nor the `id`, nor `name` - parameter is passed to this method. 
- - """ - - pid = id - parent_entity = None - - if isinstance(parent, Entity): - parent_entity = parent - elif isinstance(parent, int): - pid = parent - elif parent is not None: - name = str(parent) - - if pid is None and name is None and parent_entity is None: - raise UserWarning( - "This method expects you to pass at least an entity, a name or an id.") - - addp = Parent(id=pid, name=name, inheritance=inheritance) - - if parent_entity is not None: - addp._wrap(parent_entity) - self.parents.append(addp) - - return self - - def has_parent(self, parent: Entity, recursive: bool = True, retrieve: bool = True, - check_name: bool = True, check_id: bool = False): - """Check if this entity has a given parent. - - If 'check_name' and 'check_id' are both False, test for identity - on the Python level. Otherwise use the name and/or ID for the - check. Note that, if checked, name or ID should not be None, - lest the check fail. - -Parameters ----------- - -parent: Entity - Check for this parent. - -recursive: bool, optional - Whether to check recursively. - -check_name: bool, optional - Whether to use the name for ancestry check. - -check_id: bool, optional - Whether to use the ID for ancestry check. - -retrieve: bool, optional - If False, do not retrieve parents from the server. - -Returns -------- -out: bool - True if ``parent`` is a true parent, False otherwise. 
-""" - - if recursive: - parents = self.get_parents_recursively(retrieve=retrieve) - else: - if retrieve: - parents = [pp.retrieve()._wrapped_entity for pp in self.parents] - else: - parents = [pp._wrapped_entity for pp in self.parents] - - if not (check_name or check_id): - return parent in parents - - name_result = ( - not check_name or - (parent.name is not None and - parent.name in [pp.name for pp in parents])) - id_result = ( - not check_id or - (parent.id is not None and - parent.id in [pp.id for pp in parents])) - - return name_result and id_result - - def get_parents(self): - """Get all parents of this entity. - - @return: _ParentList(list) - """ - - return self.parents - - def get_parents_recursively(self, retrieve: bool = True): - """Get all ancestors of this entity. - -Parameters ----------- - -retrieve: bool, optional - If False, do not retrieve parents from the server. - -Returns -------- -out: List[Entity] - The parents of this Entity -""" - - all_parents = [] - self._get_parent_recursively(all_parents, retrieve=retrieve) - - return all_parents - - def _get_parent_recursively(self, all_parents: list, retrieve: bool = True): - """Get all ancestors with a little helper. - - As a side effect of this method, the ancestors are added to - all_parents. - - @param all_parents: list, The added parents so far. - - @return: None, but see side effects. - """ - - for parent in self.parents: - # TODO: - # Comment on _wrap and _wrapped_entity - # Currently, I (henrik) do not why the wrapping is necessary (and it is not - # documented). However, the following illustrates, why I think, it is a bad idea. - # First you add a parent with rec.add_parent(parent), but then you cannot access - # attributes of parent when you use rec.parents[0] for example becasue you do not get - # the same object but a wrapping object and you need to know that you only get the - # original by accessing the private (!) _wrapped_entity object. 
- w_parent = parent._wrapped_entity - if retrieve: - parent.retrieve() - for next_parent in parent.parents: - w_parent.add_parent(next_parent) - - if (w_parent.id, w_parent.name) not in [ - (all_p.id, all_p.name) for all_p in all_parents]: - all_parents.append(w_parent) - w_parent._get_parent_recursively(all_parents, retrieve=retrieve) - - def get_parent(self, key): - """Return the first parent matching the key or None if no match exists. - - Parameters - --------- - key : int or Enity or str - The id, Entity, or name of the parent that should be - returned. If an Entity is given, its id or its name is - used to find a matching parent. - - Returns - ------- - parent : Entity - The first parent of this entity that matches the given id, - entity, or name. - - """ - - if isinstance(key, int): - for p in self.parents: - if p.id is not None and int(p.id) == int(key): - return p - elif isinstance(key, Entity): - if key.id is not None: - # first try by id - found = self.get_parent(int(key.id)) - - if found is not None: - return found - # otherwise by name - - return self.get_parent(key.name) - else: - for p in self.parents: - if (p.name is not None - and str(p.name).lower() == str(key).lower()): - - return p - - return None - - def get_properties(self): - """Get all properties of this entity. - - @return: _Properties(list) - """ - - return self.properties - - def get_property(self, pattern): - """ Return the first matching property or None. - - Parameters - ---------- - pattern : str or int or Entity - The name or id to look for (case-insensitive) or an Entity where - the name or id is used to match the properites of this instance. - - Returns - ------- - property : Property - The first Property of this Entity with a matching name or id. 
- - """ - # entity given - - if (hasattr(pattern, "name") or hasattr(pattern, "id")): - # only return if a result was found, otherwise use id - - if (hasattr(pattern, "name") and pattern.name is not None - and self.get_property(pattern.name) is not None): - - return self.get_property(pattern.name) - - if hasattr(pattern, "id") and pattern.id is not None: - return self.get_property(pattern.id) - - # int given - elif isinstance(pattern, int): - for p in self.properties: - if p.id is not None and int(p.id) == int(pattern): - return p - # str given - elif isinstance(pattern, str): - for p in self.properties: - if (p.name is not None - and str(p.name).lower() == str(pattern).lower()): - - return p - else: - raise ValueError("argument should be entity, int , string") - - return None - - def _get_value_for_selector(self, selector): - """return the value described by the selector - - A selector is a list or a tuple of strings describing a path in an - entity tree with self as root. The last selector may be a special one - like unit or name. 
- - See also get_property_values() - """ - SPECIAL_SELECTORS = ["unit", "value", "description", "id", "name"] - - if not isinstance(selector, (tuple, list)): - selector = [selector] - - ref = self - - # there are some special selectors which can be applied to the - # final element; if such a special selector exists we split it - # from the list - - if selector[-1].lower() in SPECIAL_SELECTORS: - special_selector = selector[-1] - selector = selector[:-1] - else: - special_selector = None - - # iterating through the entity tree according to the selector - - for subselector in selector: - # selector does not match the structure, we cannot get a - # property of non-entity - - if not isinstance(ref, Entity): - return None - - prop = ref.get_property(subselector) - - # selector does not match the structure, we did not get a - # property - - if prop is None: - return None - - # if the property is a reference, we are interested in the - # corresponding entities attributes - - if isinstance(prop.value, Entity): - ref = prop.value - - # otherwise in the attributes of the property - else: - ref = prop - - # if we saved a special selector before, apply it - - if special_selector is None: - return prop.value - else: - return getattr(ref, special_selector.lower()) - - def get_property_values(self, *selectors): - """ Return a tuple with the values described by the given selectors. - - This represents an entity's properties as if it was a row of a table - with the given columns. - - If the elements of the selectors parameter are tuples, they will return - the properties of the referenced entity, if present. E.g. ("window", - "height") will return the value of the height property of the - referenced window entity. - - The tuple's values correspond to the order of selectors parameter. - - The tuple contains None for all values that are not available in the - entity. That does not necessarily mean, that the values are not stored - in the database (e.g. 
if a single entity was retrieved without - referenced entities). - - Parameters - ---------- - *selectors : str or tuple of str - Each selector is a list or tuple of property names, e.g. `"height", - "width"`. - - Returns - ------- - row : tuple - A row-like representation of the entity's properties. - """ - row = tuple() - - for selector in selectors: - val = self._get_value_for_selector(selector) - - if isinstance(val, Entity): - val = val.id if val.id is not None else val.name - row += (val,) - - return row - - def get_messages(self): - """Get all messages of this entity. - - @return: Messages(list) - """ - - return self.messages - - def get_warnings(self): - """Get all warning messages of this entity. - - @return Messages(list): Warning messages. - """ - ret = Messages() - - for m in self.messages: - if m.type.lower() == "warning": - ret.append(m) - - return ret - - def get_errors(self): - """Get all error messages of this entity. - - @return Messages(list): Error messages. - """ - ret = Messages() - - for m in self.messages: - if m.type.lower() == "error": - ret.append(m) - - if self._wrapped_entity is not None: - ret.extend(self._wrapped_entity.get_errors()) - - return ret - - def get_errors_deep(self, roots=None): - """Get all error messages of this entity and all sub-entities / - parents / properties. - - @return A list of tuples. Tuple index 0 contains the error message - and tuple index 1 contains the tree. - """ - roots = [] if roots is None else roots - result_list = list() - ret_self = self.get_errors() - result_list.extend([ - (m, roots) for m in ret_self]) - - for parent in self.get_parents(): - result_list.extend( - parent.get_errors_deep( - roots + [parent])) - - return result_list - - def has_errors(self): - ''' - @return True: if and only if this entities has any error messages. 
- ''' - - for m in self.messages: - if m.type.lower() == "error": - return True - - return False - - def to_xml(self, xml=None, add_properties=ALL, local_serialization=False): - """Generate an xml representation of this entity. If the parameter xml - is given, all attributes, parents, properties, and messages of this - entity will be added to it instead of creating a new element. - - Raise an error if xml is not a lxml.etree.Element - - @param xml: an xml element to which all attributes, parents, - properties, and messages - are to be added. - @return: xml representation of this entity. - """ - - if xml is None: - # use role as xml tag name, fall-back to "Entity" - elem_tag = "Entity" if self.role is None else self.role - xml = etree.Element(elem_tag) - assert isinstance(xml, etree._Element) - - # unwrap wrapped entity - - if self._wrapped_entity is not None: - xml = self._wrapped_entity.to_xml(xml, add_properties) - - if self.id is not None: - xml.set("id", str(self.id)) - - if self._cuid is not None: - xml.set("cuid", str(self._cuid)) - - if self.name is not None: - xml.set("name", str(self.name)) - - if self.description is not None: - xml.set("description", str(self.description)) - - if self.version is not None: - xml.append(self.version.to_xml()) - - if self.value is not None: - if isinstance(self.value, Entity): - if self.value.id is not None: - xml.text = str(self.value.id) - elif self.value.name is not None: - xml.text = str(self.value.name) - else: - xml.text = str(self.value) - elif isinstance(self.value, list): - for v in self.value: - v_elem = etree.Element("Value") - - if isinstance(v, Entity): - if v.id is not None: - v_elem.text = str(v.id) - elif v.name is not None: - v_elem.text = str(v.name) - else: - v_elem.text = str(v) - elif v == "": - v_elem.append(etree.Element("EmptyString")) - elif v is None: - pass - else: - v_elem.text = str(v) - xml.append(v_elem) - elif self.value == "": - xml.append(etree.Element("EmptyString")) - elif str(self.value) 
== "nan": - xml.text = "NaN" - else: - xml.text = str(self.value) - - if self.datatype is not None: - if isinstance(self.datatype, Entity): - if self.datatype.id is not None: - xml.set("datatype", str(self.datatype.id)) - elif self.datatype.name is not None: - xml.set("datatype", str(self.datatype.name)) - else: - xml.set("datatype", str(self.datatype)) - else: - xml.set("datatype", str(self.datatype)) - - if self.path is not None: - xml.set("path", self.path) - - if self.file is not None and local_serialization: - xml.set("file", self.file) - - if self._checksum is not None: - xml.set("checksum", self._checksum) - - if self.size is not None: - xml.set("size", str(self.size)) - - if self.unit is not None: - xml.set("unit", str(self.unit)) - - if self.messages is not None: - self.messages.to_xml(xml) - - if self.parents is not None: - self.parents.to_xml(xml) - - if self.properties is not None: - self.properties.to_xml(xml, add_properties) - - if len(self._flags) > 0: - flagattr = "" - - for key in self._flags.keys(): - flag = self._flags[key] - - if flag is not None and flag != "": - flagattr += str(key) + ":" + str(flag) + "," - else: - flagattr += str(key) + "," - xml.set("flag", flagattr) - - if self.acl is not None: - xml.append(self.acl.to_xml()) - - if self.state is not None: - xml.append(self.state.to_xml()) - - return xml - - @staticmethod - def _from_xml(entity, elem): - """Parse a single string representation of an xml element to an entity. 
- - @param entity: the entity - @param elem: the xml element - """ - - if isinstance(entity, Entity): - entity.role = elem.tag - entity._cuid = elem.get("cuid") - entity.id = elem.get("id") # @ReservedAssignment - entity.name = elem.get("name") - entity.description = elem.get("description") - entity.path = elem.get("path") - entity._checksum = elem.get("checksum") - entity._size = elem.get("size") - entity.datatype = elem.get("datatype") # @ReservedAssignment - entity.unit = elem.get("unit") - entity.file = elem.get("file") - - if hasattr(entity, "affiliation"): - entity.affiliation = elem.get("affiliation") - - vals = list() - - for celem in elem: - - child = _parse_single_xml_element(celem) - - if isinstance(child, Property): - entity.properties.append(property=child, - importance=celem.get("importance"), - inheritance=None) - elif isinstance(child, Parent): - entity.add_parent(child) - elif isinstance(child, ACL): - entity.acl = child - elif isinstance(child, Permissions): - entity.permissions = child - elif isinstance(child, Message): - entity.add_message(child) - elif isinstance(child, Version): - entity.version = child - elif isinstance(child, State): - entity.state = child - elif child is None or hasattr(child, "encode"): - vals.append(child) - elif isinstance(child, Entity): - vals.append(child) - else: - raise TypeError( - 'Child was neither a Property, nor a Parent, nor a Message.\ - Was ' + str(type(child)) + "\n" + str(child)) - - # add VALUE - value = None - - if vals: - # The value[s] have been inside a <Value> tag. - value = vals - elif elem.text is not None and elem.text.strip() != "": - value = elem.text.strip() - - try: - entity.value = value - except ValueError: - # circumvent the parsing. 
- entity.__value = value - - return entity - - def __repr__(self): - return xml2str(self.to_xml()) - - def retrieve_acl(self): - self.acl = Entity(name=self.name, id=self.id).retrieve( - flags={"ACL": None}).acl - - def update_acl(self): - if self.id is None: - c = Container().retrieve(query=self.name, sync=False) - - if len(c) == 1: - e = c[0] - elif len(c) == 0: - ee = EntityDoesNotExistError( - "The entity to be updated does not exist on the server.", - entity=self - ) - raise TransactionError(ee) - else: - ae = AmbiguousEntityError( - "Could not determine the desired Entity which is to be updated by its name.", - entity=self - ) - raise TransactionError(ae) - else: - e = Container().retrieve(query=self.id, sync=False)[0] - e.acl = ACL(self.acl.to_xml()) - e.update() - - return e - - def delete(self, raise_exception_on_error=True): - return Container().append(self).delete( - raise_exception_on_error=raise_exception_on_error)[0] - - def retrieve(self, unique=True, raise_exception_on_error=True, flags=None): - """Retrieve this entity identified via its id if present and via its - name otherwise. Any locally already existing attributes (name, - description, ...) will be preserved. Any such properties and parents - will be synchronized as well. They will not be overridden. This method - returns a Container containing the this entity. - - Note: If only a name is given this could lead to ambiguities. Usually - this would raise a CaosDBException. Set the flag 'unique' to False if - this Exception should be suppressed. If unique is False this method - returns a Container object which carries the returned entities. They are - distinct from this one. This entity will no be changed somehow. - - @param unique=True: flag to suppress the ambiguity exception. - - @return - Container with the returned entities or single entity if and only - if unique was True and no exception was raised. 
- - """ - - if unique: - c = Container().append(self).retrieve( - unique=unique, raise_exception_on_error=raise_exception_on_error, flags=flags) - - if len(c) == 1: - c[0].messages.extend(c.messages) - - return c[0] - - raise QueryNotUniqueError("This retrieval was not unique!!!") - - return Container().append(self).retrieve( - unique=unique, raise_exception_on_error=raise_exception_on_error, flags=flags) - - def insert(self, raise_exception_on_error=True, unique=True, - sync=True, strict=False, flags=None): - """Insert this entity into a CaosDB server. A successful insertion will - generate a new persistent ID for this entity. This entity can be - identified, retrieved, updated, and deleted via this ID until it has - been deleted. - - If the insertion fails, a CaosDBException will be raised. The server will have returned at - least one error-message describing the reason why it failed in that case (call - <this_entity>.get_all_messages() in order to get these error-messages). - - Some insertions might cause warning-messages on the server-side, but the entities are inserted - anyway. Set the flag 'strict' to True in order to force the server to take all warnings as errors. - This prevents the server from inserting this entity if any warning occurs. - - Parameters - ---------- - strict : bool, optional - Flag for strict mode. Default is False. - raise_exception_on_error : bool, optional - Flag to raise an exception when an error occurs. Default is True. - unique : bool, optional - Flag to only allow insertion of elements with unique names. Default - is True. - flags : dict, optional - A dictionary of flags to be send with the insertion. Default is - None. - - """ - - return Container().append(self).insert( - strict=strict, - raise_exception_on_error=raise_exception_on_error, - unique=unique, - sync=sync, - flags=flags)[0] - - def update(self, strict=False, raise_exception_on_error=True, - unique=True, flags=None, sync=True): - """Update this entity. 
- -There are two possible work-flows to perform this update: -First: - 1) retrieve an entity - 2) do changes - 3) call update method - -Second: - 1) construct entity with id - 2) call update method. - - For slight changes the second one it is more comfortable. Furthermore, it is possible to stay - off-line until calling the update method. The name, description, unit, datatype, path, - and value of an entity may be changed. Additionally, properties, parents and messages may be added. - - However, the first one is more powerful: It is possible to delete and change properties, parents - and attributes, which is not possible via the second one for internal reasons (which are reasons - of definiteness). - - If the update fails, a CaosDBException will be raised. The server will have returned at - least one error message describing the reason why it failed in that case (call - <this_entity>.get_all_messages() in order to get these error-messages). - - Some updates might cause warning messages on the server-side, but the updates are performed - anyway. Set flag 'strict' to True in order to force the server to take all warnings as errors. - This prevents the server from updating this entity if any warnings occur. - - @param strict=False: Flag for strict mode. - """ - - return Container().append(self).update( - strict=strict, - sync=sync, - raise_exception_on_error=raise_exception_on_error, - unique=unique, - flags=flags)[0] - - def _wrap(self, entity): - """ - When entity shall be used as parent or property it is not added to the corresponding list - (such as the parent list) directly, but another Entity object is created and the original - Entity is wrapped using this function - TODO: document here and in dev docs why this is done. 
- """ - self._wrapped_entity = entity - - return self - - def set_flag(self, key, value=None): - self._flags[key] = value - - return self - - -def _parse_value(datatype, value): - """Parse the value (from XML input) according to the given datatype - """ - - # Simple values - if value is None: - return value - - if datatype is None: - return value - - if datatype == DOUBLE: - return float(value) - - if datatype == INTEGER: - return int(str(value)) - - if datatype == BOOLEAN: - if str(value).lower() == "true": - return True - elif str(value).lower() == "false": - return False - else: - raise ValueError("Boolean value was {}.".format(value)) - - # Datetime and text are returned as-is - if datatype in [DATETIME, TEXT]: - if isinstance(value, str): - return value - - # deal with collections - if isinstance(datatype, str): - matcher = re.compile(r"^(?P<col>[^<]+)<(?P<dt>[^>]+)>$") - m = matcher.match(datatype) - - if m: - col = m.group("col") - dt = m.group("dt") - - if col == "LIST": - ret = list() - else: - return value - - if hasattr(value, "__iter__") and not isinstance(value, str): - for v in value: - ret.append(_parse_value(dt, v)) - else: - # put a single value into a list since the datatype says so. - ret.append(_parse_value(dt, value)) - - return ret - - # This is for a special case, where the xml parser could not differentiate - # between single values and lists with one element. 
As - if hasattr(value, "__len__") and len(value) == 1: - return _parse_value(datatype, value[0]) - - # deal with references - if isinstance(value, Entity): - return value - - if isinstance(value, str) and "@" in value: - # probably this is a versioned reference - - return str(value) - else: - # for unversioned references - try: - return int(value) - except ValueError: - # reference via name - - return str(value) - except TypeError as te: - # deal with invalid XML: List of values without appropriate datatype - if isinstance(value, list): - raise TypeError( - "Invalid datatype: List valued properties must be announced by " - "the datatype.\n" + f"Datatype: {datatype}\nvalue: {value}") - else: - # Everything else that's not related to wrong list assignments - raise te - - -def _log_request(request, xml_body=None): - if Container._debug() > 0: - print("\n" + request) - - if xml_body is not None: - print("======== Request body ========\n") - print(xml2str(xml_body)) - print("\n==============================\n") - - -def _log_response(body): - if Container._debug() > 0: - print("\n======== Response body ========\n") - print(body.decode()) - print("\n===============================\n") - - -class QueryTemplate(): - - def __init__(self, id=None, name=None, query=None, description=None): # @ReservedAssignment - - self.id = (int(id) if id is not None else None) - self.role = "QueryTemplate" - self.name = name - self.description = description - self.query = query - self._cuid = None - self.value = None - self.datatype = None - self.messages = Messages() - self.properties = None - self.parents = None - self.path = None - self.file = None - self._checksum = None - self._size = None - self._upload = None - self.unit = None - self.acl = None - self.permissions = None - self.is_valid = lambda: False - self.is_deleted = lambda: False - self.version = None - self.state = None - - def retrieve(self, raise_exception_on_error=True, unique=True, sync=True, - flags=None): - - return 
class QueryTemplate():
    """A named, stored query.

    Mimics enough of the Entity interface (id, name, messages, acl,
    version, ...) to be carried in a Container next to real entities and to
    be serialized to/from XML.
    """

    def __init__(self, id=None, name=None, query=None, description=None):  # @ReservedAssignment
        self.id = (int(id) if id is not None else None)
        self.role = "QueryTemplate"
        self.name = name
        self.description = description
        self.query = query
        # Entity-compatible attributes so a QueryTemplate can live in a
        # Container; most stay None for query templates.
        self._cuid = None
        self.value = None
        self.datatype = None
        self.messages = Messages()
        self.properties = None
        self.parents = None
        self.path = None
        self.file = None
        self._checksum = None
        self._size = None
        self._upload = None
        self.unit = None
        self.acl = None
        self.permissions = None
        self.is_valid = lambda: False
        self.is_deleted = lambda: False
        self.version = None
        self.state = None

    def retrieve(self, raise_exception_on_error=True, unique=True, sync=True,
                 flags=None):
        """Retrieve this query template via a single-element Container."""
        return Container().append(self).retrieve(
            raise_exception_on_error=raise_exception_on_error,
            unique=unique,
            sync=sync,
            flags=flags)[0]

    def insert(self, strict=True, raise_exception_on_error=True,
               unique=True, sync=True, flags=None):
        """Insert this query template via a single-element Container."""
        return Container().append(self).insert(
            strict=strict,
            raise_exception_on_error=raise_exception_on_error,
            unique=unique,
            sync=sync,
            flags=flags)[0]

    def update(self, strict=True, raise_exception_on_error=True,
               unique=True, sync=True, flags=None):
        """Update this query template via a single-element Container."""
        return Container().append(self).update(
            strict=strict,
            raise_exception_on_error=raise_exception_on_error,
            unique=unique,
            sync=sync,
            flags=flags)[0]

    def delete(self, raise_exception_on_error=True):
        """Delete this query template via a single-element Container."""
        return Container().append(self).delete(
            raise_exception_on_error=raise_exception_on_error)[0]

    def __repr__(self):
        return xml2str(self.to_xml())

    def to_xml(self, xml=None):
        """Serialize this query template to a ``<QueryTemplate>`` element."""
        if xml is None:
            xml = etree.Element("QueryTemplate")

        if self.name is not None:
            xml.set("name", self.name)
        if self.id is not None:
            xml.set("id", str(self.id))
        if self.description is not None:
            xml.set("description", self.description)
        if self.version is not None:
            xml.append(self.version.to_xml())
        if self.query is not None:
            queryElem = etree.Element("Query")
            queryElem.text = self.query
            xml.append(queryElem)
        if self.messages is not None:
            self.messages.to_xml(xml)
        if self.acl is not None:
            xml.append(self.acl.to_xml())

        return xml

    @staticmethod
    def _from_xml(xml):
        """Build a QueryTemplate from a ``<QueryTemplate>`` element.

        Returns None when the element has a different tag.
        """
        if xml.tag.lower() == "querytemplate":
            q = QueryTemplate(name=xml.get("name"),
                              description=xml.get("description"), query=None)

            for e in xml:
                if e.tag.lower() == "query":
                    q.query = e.text
                else:
                    child = _parse_single_xml_element(e)

                    if isinstance(child, Message):
                        q.messages.append(child)
                    elif isinstance(child, ACL):
                        q.acl = child
                    elif isinstance(child, Version):
                        q.version = child
                    elif isinstance(child, Permissions):
                        q.permissions = child
            # FIX: int(None) raised a TypeError when the server omitted the
            # id attribute; guard against a missing id.
            id_str = xml.get("id")
            q.id = int(id_str) if id_str is not None else None

            return q
        else:
            return None

    def clear_server_messages(self):
        self.messages.clear_server_messages()

    def get_parents(self):
        # Query templates have no parents.
        return []

    def get_properties(self):
        # Query templates have no properties.
        return []

    def has_id(self):
        return self.id is not None

    def get_errors(self):
        """Return a Messages list containing only the error messages."""
        ret = Messages()
        for m in self.messages:
            if m.type.lower() == "error":
                ret.append(m)
        return ret

    def get_messages(self):
        return self.messages

    def has_errors(self):
        return len(self.get_errors()) > 0


class Parent(Entity):
    """The parent entities."""

    @property
    def affiliation(self):
        # Fall back to the wrapped entity's affiliation when this object has
        # none of its own.
        if self.__affiliation is not None or self._wrapped_entity is None:
            return self.__affiliation
        elif hasattr(self._wrapped_entity, "affiliation"):
            return self._wrapped_entity.affiliation
        return

    @affiliation.setter
    def affiliation(self, affiliation):
        self.__affiliation = affiliation

    def __init__(self, id=None, name=None, description=None, inheritance=None):  # @ReservedAssignment
        Entity.__init__(self, id=id, name=name, description=description)
        if inheritance is not None:
            self.set_flag("inheritance", inheritance)
        self.__affiliation = None

    def to_xml(self, xml=None, add_properties=None):
        if xml is None:
            xml = etree.Element("Parent")
        return super().to_xml(xml=xml, add_properties=add_properties)


class _EntityWrapper(object):
    pass


class _ConcreteProperty(_EntityWrapper):
    pass
class Property(Entity):

    """CaosDB's Property object."""

    def add_property(self, property=None, value=None, id=None, name=None, description=None, datatype=None,
                     unit=None, importance=FIX, inheritance=FIX):  # @ReservedAssignment
        """See ``Entity.add_property``."""

        return super().add_property(
            property=property, id=id, name=name, description=description, datatype=datatype,
            value=value, unit=unit, importance=importance, inheritance=inheritance)

    def add_parent(self, parent=None, id=None, name=None, inheritance=FIX):
        """Add a parent Entity to this Property.

        Parameters
        ----------
        parent : Entity or int or str or None
            The parent entity, either specified by the Entity object
            itself, or its id or its name. Default is None.
        id : int
            Integer id of the parent entity. Ignored if `parent`
            is not None.
        name : str
            Name of the parent entity. Ignored if `parent is not
            none`.
        inheritance : str, default: FIX
            One of ``obligatory``, ``recommended``, ``suggested``, or ``fix``. Specifies the
            minimum importance which parent properties need to have to be inherited by this
            entity. If no `inheritance` is given, no properties will be inherited by the child.
            This parameter is case-insensitive.

        See Also
        --------
        Entity.add_parent

        """
        # FIX: the original docstring repeated the "Parameters" header twice.
        return super(Property, self).add_parent(parent=parent, id=id, name=name, inheritance=inheritance)

    def __init__(self, name=None, id=None, description=None, datatype=None,
                 value=None, unit=None):
        Entity.__init__(self, id=id, name=name, description=description,
                        datatype=datatype, value=value, role="Property")
        self.unit = unit

    def to_xml(self, xml=None, add_properties=ALL):
        if xml is None:
            xml = etree.Element("Property")

        return super(Property, self).to_xml(xml, add_properties)

    def is_reference(self, server_retrieval=False):
        """Returns whether this Property is a reference

        Parameters
        ----------
        server_retrieval : bool, optional
            If True and the datatype is not set, the Property is retrieved from the server, by default False

        Returns
        -------
        bool, NoneType
            Returns whether this Property is a reference or None if a server call is needed to
            check correctly, but server_retrieval is set to False.

        """
        if self.datatype is None:
            if not self.is_valid():
                # Workaround to prevent side effects, since retrieve
                # currently changes the object.
                if server_retrieval:
                    tmp_prop = deepcopy(self)
                    # Remove the role to avoid an unnecessary ValueError
                    # while retrieving the Entity.
                    tmp_prop.role = None
                    tmp_prop.retrieve()

                    return tmp_prop.is_reference()
                else:
                    return None
            else:
                # A valid property without datatype has to be a RecordType.
                return True
        else:
            return is_reference(self.datatype)


class Message(object):
    """A server or client message with a type, an optional code, an optional
    description, and an optional body.

    ``type`` defaults to "Info"; ``code`` is stored as int when given.
    Equality is equality of ``type``, ``code``, and ``description``.
    """

    def __init__(self, type=None, code=None, description=None, body=None):  # @ReservedAssignment
        self.description = description
        self.type = type if type is not None else "Info"
        self.code = int(code) if code is not None else None
        self.body = body

    def to_xml(self, xml=None):
        if xml is None:
            # The message type doubles as the XML tag name.
            xml = etree.Element(str(self.type))

        if self.code is not None:
            xml.set("code", str(self.code))
        if self.description:
            xml.set("description", str(self.description))
        if self.body:
            xml.text = str(self.body)

        return xml

    def __repr__(self):
        return xml2str(self.to_xml())

    def __eq__(self, obj):
        if isinstance(obj, Message):
            return self.type == obj.type and self.code == obj.code and self.description == obj.description
        return False

    def get_code(self):
        warn(("get_code is deprecated and will be removed in future. "
              "Use self.code instead."), DeprecationWarning)
        return int(self.code)
" - "Use self.code instead."), DeprecationWarning) - return int(self.code) - - -class RecordType(Entity): - - """This class represents CaosDB's RecordType entities.""" - - def add_property(self, property=None, value=None, id=None, name=None, description=None, datatype=None, - unit=None, importance=RECOMMENDED, inheritance=FIX): # @ReservedAssignment - """See ``Entity.add_property``.""" - - return super().add_property( - property=property, id=id, name=name, description=description, datatype=datatype, - value=value, unit=unit, importance=importance, inheritance=inheritance) - - def add_parent(self, parent=None, id=None, name=None, inheritance=OBLIGATORY): - """Add a parent to this RecordType - - Parameters - ---------- - parent : Entity or int or str or None, optional - The parent entity, either specified by the Entity object - itself, or its id or its name. Default is None. - Parameters - ---------- - parent : Entity or int or str or None - The parent entity, either specified by the Entity object - itself, or its id or its name. Default is None. - id : int - Integer id of the parent entity. Ignored if `parent` - is not None. - name : str - Name of the parent entity. Ignored if `parent is not - none`. - inheritance : str, default OBLIGATORY - One of ``obligatory``, ``recommended``, ``suggested``, or ``fix``. Specifies the - minimum importance which parent properties need to have to be inherited by this - entity. If no `inheritance` is given, no properties will be inherited by the child. - This parameter is case-insensitive. 
- - See Also - -------- - Entity.add_parent - - """ - - return super().add_parent(parent=parent, id=id, name=name, inheritance=inheritance) - - def __init__(self, name=None, id=None, description=None, datatype=None): # @ReservedAssignment - Entity.__init__(self, name=name, id=id, description=description, - datatype=datatype, role="RecordType") - - def to_xml(self, xml=None, add_properties=ALL): - if xml is None: - xml = etree.Element("RecordType") - - return Entity.to_xml(self, xml, add_properties) - - -class Record(Entity): - - """This class represents CaosDB's Record entities.""" - - def add_property(self, property=None, value=None, id=None, name=None, description=None, datatype=None, - unit=None, importance=FIX, inheritance=FIX): # @ReservedAssignment - """See ``Entity.add_property``.""" - - return super().add_property( - property=property, id=id, name=name, description=description, datatype=datatype, - value=value, unit=unit, importance=importance, inheritance=inheritance) - - def __init__(self, name=None, id=None, description=None): # @ReservedAssignment - Entity.__init__(self, name=name, id=id, description=description, - role="Record") - - def to_xml(self, xml=None, add_properties=ALL): - if xml is None: - xml = etree.Element("Record") - - return Entity.to_xml(self, xml, add_properties=ALL) - - -class File(Record): - - """This class represents CaosDB's file entities. - - For inserting a new file to the server, `path` gives the new location, and - (exactly?) one of `file` and `pickup` should (must?) be given to specify the - source of the file. - - Symlinking from the "extroot" file system is not supported by this API yet, - it can be done manually using the `InsertFilesInDir` flag. For sample code, - look at `test_files.py` in the Python integration tests of the - `load_files.py` script in the advanced user tools. - - @param name: A name for this file record (That's an entity name - not to be - confused with the last segment of the files path). 
class File(Record):

    """This class represents CaosDB's file entities.

    For inserting a new file to the server, `path` gives the new location,
    and (exactly?) one of `file` and `pickup` should (must?) be given to
    specify the source of the file.

    Symlinking from the "extroot" file system is not supported by this API
    yet, it can be done manually using the `InsertFilesInDir` flag. For
    sample code, look at `test_files.py` in the Python integration tests of
    the `load_files.py` script in the advanced user tools.

    @param name: A name for this file record (That's an entity name - not to
        be confused with the last segment of the files path).
    @param id: An ID.
    @param description: A description for this file record.
    @param path: The complete path, including the file name, of the file in
        the server's "caosroot" file system.
    @param file: A local path or python file object. The file designated by
        this argument will be uploaded to the server via HTTP.
    @param pickup: A file/folder in the DropOffBox (the server will move that
        file into its "caosroot" file system).
    @param thumbnail: (Local) filename to a thumbnail for this file.
    @param properties: A list of properties for this file record. @todo is
        this implemented?
    @param from_location: Deprecated, use `pickup` instead.
    """

    def __init__(self, name=None, id=None, description=None,  # @ReservedAssignment
                 path=None, file=None, pickup=None,  # @ReservedAssignment
                 thumbnail=None, from_location=None):
        Record.__init__(self, id=id, name=name, description=description)
        self.role = "File"
        self.datatype = None

        # location in the fileserver
        self.path = path

        # local file path or pointer to local file
        self.file = file
        self.thumbnail = thumbnail

        self.pickup = pickup

        if from_location is not None:
            warn(DeprecationWarning(
                "Param `from_location` is deprecated, use `pickup instead`."))
            if self.pickup is None:
                self.pickup = from_location

    def to_xml(self, xml=None, add_properties=ALL, local_serialization=False):
        """Convert this file to an xml element.

        @return: xml element
        """
        if xml is None:
            xml = etree.Element("File")

        return Entity.to_xml(self, xml=xml, add_properties=add_properties,
                             local_serialization=local_serialization)

    def download(self, target=None):
        """Download this file-entity's actual file from the file server. It
        will be stored to the target or will be hold as a temporary file.

        @param target: Where to store this file.
        @return: local path of the downloaded file.
        """
        self.clear_server_messages()

        if target:
            file_ = open(target, 'wb')
        else:
            file_ = NamedTemporaryFile(mode='wb', delete=False)
        # download_from_path() closes file_ when done.
        checksum = File.download_from_path(file_, self.path)

        if self._checksum is not None and self._checksum.lower() != checksum.hexdigest().lower():
            raise ConsistencyError(
                "The downloaded file had an invalid checksum. Maybe the download did not finish?")

        return file_.name

    @staticmethod
    def download_from_path(target_file, path):
        """Stream the server file at `path` into `target_file` (closing it)
        and return the sha512 checksum object of the downloaded bytes."""
        _log_request("GET (download): " + path)
        response = get_connection().download_file(path)

        data = response.read(8000)
        checksum = sha512()
        while data:
            target_file.write(data)
            checksum.update(data)
            data = response.read(8000)
        target_file.close()

        return checksum

    @staticmethod
    def _get_checksum(files):
        """Return the sha512 hex digest of a file, an open file object, or a
        directory (directories are hashed from their children's digests,
        sorted by locale-aware name)."""
        import locale

        if hasattr(files, "name"):
            return File._get_checksum_single_file(files.name)

        if isdir(files):
            checksumappend = ""
            for child in sorted(listdir(files),
                                key=cmp_to_key(locale.strcoll)):
                if isdir(files + '/' + child):
                    checksumappend += child
                checksumappend += File._get_checksum(files + "/" + child)
            checksum = sha512()
            checksum.update(checksumappend.encode('utf-8'))
            return checksum.hexdigest()

        return File._get_checksum_single_file(files)

    @staticmethod
    def _get_checksum_single_file(single_file):
        """Return the sha512 hex digest of one file, read in 1000-byte chunks."""
        checksum = sha512()
        # FIX: use a context manager so the handle is closed even if read()
        # raises (the original leaked the handle on error).
        with open(single_file, 'rb') as _file:
            data = _file.read(1000)
            while data:
                checksum.update(data)
                data = _file.read(1000)
        return checksum.hexdigest()

    def add_property(self, property=None, id=None, name=None, description=None, datatype=None,
                     value=None, unit=None, importance=FIX, inheritance=FIX):  # @ReservedAssignment
        """See ``Entity.add_property``."""

        return super().add_property(
            property=property, id=id, name=name, description=description, datatype=datatype,
            value=value, unit=unit, importance=importance, inheritance=inheritance)
_Properties(list): - - def __init__(self): - list.__init__(self) - self._importance = dict() - self._inheritance = dict() - self._element_by_name = dict() - self._element_by_id = dict() - - def get_importance(self, property): # @ReservedAssignment - if property is not None: - if hasattr(property, "encode"): - property = self.get_by_name(property) # @ReservedAssignment - - return self._importance.get(property) - - def set_importance(self, property, importance): # @ReservedAssignment - if property is not None: - self._importance[property] = importance - - def get_by_name(self, name): - """Get a property of this list via it's name. Raises a CaosDBException - if not exactly one property has this name. - - @param name: the name of the property to be returned. - @return: A property - """ - - return self._element_by_name[name] - - def extend(self, parents): - self.append(parents) - - return self - - def append(self, property, importance=None, inheritance=None): # @ReservedAssignment - if isinstance(property, list): - for p in property: - self.append(p, importance, inheritance) - - return - - if isinstance(property, Entity): - if importance is not None: - self._importance[property] = importance - - if inheritance is not None: - self._inheritance[property] = inheritance - else: - self._inheritance[property] = FIX - - if property.id is not None: - self._element_by_id[str(property.id)] = property - - if property.name is not None: - self._element_by_name[property.name] = property - list.append(self, property) - else: - raise TypeError("Argument was not an entity") - - return self - - def to_xml(self, add_to_element, add_properties): - for p in self: - importance = self._importance.get(p) - - if add_properties == FIX and not importance == FIX: - continue - - pelem = p.to_xml(xml=etree.Element("Property"), add_properties=FIX) - - if p in self._importance: - pelem.set("importance", importance) - - if p in self._inheritance: - pelem.set("flag", "inheritance:" + - 
str(self._inheritance.get(p))) - add_to_element.append(pelem) - - return self - - def __repr__(self): - xml = etree.Element("PropertyList") - self.to_xml(xml, add_properties=FIX) - - return xml2str(xml) - - def _get_entity_by_cuid(self, cuid): - ''' - Get the first entity which has the given cuid. - Note: this method is intended for internal use. - @param name: The cuid of the entity to be returned. - @return: Entity with the given cuid. - ''' - - for e in self: - if e._cuid is not None: - if str(e._cuid) == str(cuid): - return e - raise KeyError("No entity with that cuid in this container.") - - def remove(self, prop): - if isinstance(prop, Entity): - if prop in self: - list.remove(self, prop) - - return - else: - if prop.id is not None: - # by id - - for e in self: - if e.id is not None and e.id == prop.id: - list.remove(self, e) - - return - - if prop.name is not None: - # by name - - for e in self: - if e.name is not None and e.name == prop.name: - list.remove(self, e) - - return - elif hasattr(prop, "encode"): - # by name - - for e in self: - if e.name is not None and str(e.name) == str(prop): - list.remove(self, e) - - return - elif isinstance(prop, int): - # by id - - for e in self: - if e.id is not None and e.id == prop: - list.remove(self, e) - - return - raise KeyError(str(prop) + " not found.") - - -class _ParentList(list): - # TODO unclear why this class is private. Isn't it use full for users? - - def _get_entity_by_cuid(self, cuid): - ''' - Get the first entity which has the given cuid. - Note: this method is intended for internal use. - @param name: The cuid of the entity to be returned. - @return: Entity with the given cuid. 
- ''' - - for e in self: - if e._cuid is not None: - if str(e._cuid) == str(cuid): - return e - raise KeyError("No entity with that cuid in this container.") - - def __init__(self): - list.__init__(self) - self._element_by_name = dict() - self._element_by_id = dict() - - def extend(self, parents): - self.append(parents) - - return self - - def append(self, parent): # @ReservedAssignment - if isinstance(parent, list): - for p in parent: - self.append(p) - - return - - if isinstance(parent, Entity): - if parent.id: - self._element_by_id[str(parent.id)] = parent - - if parent.name: - self._element_by_name[parent.name] = parent - list.append(self, parent) - else: - raise TypeError("Argument was not an Entity") - - return self - - def to_xml(self, add_to_element): - for p in self: - pelem = etree.Element("Parent") - - if p.id is not None: - pelem.set("id", str(p.id)) - - if p._cuid is not None: - pelem.set("cuid", str(p._cuid)) - - if p.name is not None: - pelem.set("name", str(p.name)) - - if p.description is not None: - pelem.set("description", str(p.description)) - - if len(p._flags) > 0: - flagattr = "" - - for key in p._flags.keys(): - flag = p._flags[key] - - if flag is not None and flag != "": - flagattr += str(key) + ":" + str(flag) + "," - else: - flagattr += str(key) + "," - pelem.set("flag", flagattr) - add_to_element.append(pelem) - - def __repr__(self): - xml = etree.Element("ParentList") - self.to_xml(xml) - - return xml2str(xml) - - def remove(self, parent): - if isinstance(parent, Entity): - if parent in self: - list.remove(self, parent) - else: - if parent.id is not None: - # by id - - for e in self: - if e.id is not None and e.id == parent.id: - list.remove(self, e) - - return - - if parent.name is not None: - # by name - - for e in self: - if e.name is not None and e.name == parent.name: - list.remove(self, e) - - return - elif hasattr(parent, "encode"): - # by name - - for e in self: - if e.name is not None and e.name == parent: - list.remove(self, 
e) - - return - elif isinstance(parent, int): - # by id - - for e in self: - if e.id is not None and e.id == parent: - list.remove(self, e) - - return - raise KeyError(str(parent) + " not found.") - - -class Messages(list): - """This specialization of list stores error, warning, info, and other - messages. The mentioned three messages types play a special role. - They are generated by the client and the server while processing the entity - to which the message in question belongs. It is RECOMMENDED NOT to specify - such messages manually. The other messages are ignored by the server unless - there is a plug-in which interprets them. - - Any message MUST have a type. It MAY have a code (an integer), a description (short string), - or a body (longer string): - - <$Type code=$code description=$description>$body</$Type> - - Error, warning, and info messages will be deleted before any transaction. - - Examples: - <<< msgs = Messages() - - <<< # create Message - <<< msg = Message(type="HelloWorld", code=1, description="Greeting the world", body="Hello, world!") - - <<< # append it to the Messages - <<< msgs.append(msg) - - <<< # use Messages as list of Message objects - <<< for m in msgs: - ... assert isinstance(m,Message) - - <<< # remove it - <<< msgs.remove(msg) - - <<< # ok append it again ... - <<< msgs.append(msg) - <<< # get it back via get(...) and the key tuple (type, code) - <<< assert id(msgs.get("HelloWorld",1))==id(msg) - """ - - def clear_server_messages(self): - """Removes all messages of type error, warning and info. 
class Messages(list):
    """This specialization of list stores error, warning, info, and other
    messages. The mentioned three messages types play a special role.
    They are generated by the client and the server while processing the entity
    to which the message in question belongs. It is RECOMMENDED NOT to specify
    such messages manually. The other messages are ignored by the server unless
    there is a plug-in which interprets them.

    Any message MUST have a type. It MAY have a code (an integer), a
    description (short string), or a body (longer string):

    <$Type code=$code description=$description>$body</$Type>

    Error, warning, and info messages will be deleted before any transaction.

    Examples:
    <<< msgs = Messages()

    <<< # create Message
    <<< msg = Message(type="HelloWorld", code=1, description="Greeting the world", body="Hello, world!")

    <<< # append it to the Messages
    <<< msgs.append(msg)

    <<< # use Messages as list of Message objects
    <<< for m in msgs:
    ...     assert isinstance(m,Message)

    <<< # remove it
    <<< msgs.remove(msg)
    """

    def clear_server_messages(self):
        """Removes all messages of type error, warning and info. All other
        messages types are custom types which should be handled by custom
        code."""
        rem = []
        for m in self:
            if m.type.lower() in ["error", "warning", "info"]:
                rem.append(m)
        for m in rem:
            self.remove(m)

    #######################################################################
    # can be removed after 01.07.24
    # default implementation of list is sufficient
    def __setitem__(self, key, value):  # @ReservedAssignment
        if not isinstance(value, Message):
            warn("__setitem__ will in future only accept Message objects as second argument. "
                 "You will no longe be"
                 " able to pass bodys such that Message object is created on the fly",
                 DeprecationWarning)
        if not isinstance(key, int):
            warn("__setitem__ will in future only accept int as first argument",
                 DeprecationWarning)
        # Resolve the (deprecated) key forms into (type, code).
        if isinstance(key, tuple):
            if len(key) == 2:
                type = key[0]  # @ReservedAssignment
                code = key[1]
            elif len(key) == 1:
                type = key[0]  # @ReservedAssignment
                code = None
            else:
                raise TypeError(
                    "('type', 'code'), ('type'), or 'type' expected.")
        elif isinstance(key, Messages._msg_key):
            type = key._type  # @ReservedAssignment
            code = key._code
        else:
            type = key  # @ReservedAssignment
            code = None

        # Resolve the (deprecated) value forms into (description, body).
        # FIX: the original contained a dead `m = Message` assignment (the
        # class, not the value) and its tuple-parsing branch was clobbered by
        # an unconditional else; restructured so that Message and tuple
        # values are honored as documented.
        if isinstance(value, Message):
            body = value.body
            description = value.description
        elif isinstance(value, tuple):
            if len(value) == 2:
                description = value[0]
                body = value[1]
            elif len(value) == 1:
                body = value[0]
                description = None
            else:
                raise TypeError(
                    "('description', 'body'), ('body'), or 'body' expected.")
        else:
            body = value
            description = None
        m = Message(type=type, code=code, description=description, body=body)
        if isinstance(key, int):
            super().__setitem__(key, m)
        else:
            self.append(m)

    def __getitem__(self, key):
        if not isinstance(key, int):
            warn("__getitem__ only supports integer keys in future.", DeprecationWarning)
        if isinstance(key, tuple):
            if len(key) == 2:
                type = key[0]  # @ReservedAssignment
                code = key[1]
            elif len(key) == 1:
                type = key[0]  # @ReservedAssignment
                code = None
            else:
                raise TypeError(
                    "('type', 'code'), ('type'), or 'type' expected.")
        elif isinstance(key, int) and key >= 0:
            return super().__getitem__(key)
        else:
            type = key  # @ReservedAssignment
            code = None
        m = self.get(type, code)
        if m is None:
            raise KeyError()
        if m.description:
            return (m.description, m.body)
        else:
            return m.body

    def __delitem__(self, key):
        if isinstance(key, tuple):
            warn("__delitem__ only supports integer keys in future.", DeprecationWarning)
            if self.get(key[0], key[1]) is not None:
                self.remove(self.get(key[0], key[1]))
        else:
            super().__delitem__(key)

    def remove(self, obj, obj2=None):
        if obj2 is not None:
            warn("Supplying a second argument to remove is deprecated.",
                 DeprecationWarning)
            super().remove(self.get(obj, obj2))
        else:
            super().remove(obj)

    def append(self, msg):
        if isinstance(msg, Messages) or isinstance(msg, list):
            warn("Supplying a list-like object to append is deprecated. Please use extend"
                 " instead.", DeprecationWarning)
            for m in msg:
                self.append(m)
            return
        super().append(msg)

    @staticmethod
    def _hash(t, c):
        # Case-insensitive (type, code) key for the fuzzy get().
        return hash(str(t).lower() + (str(",") + str(c) if c is not None else ''))
    # end remove
    #######################################################################

    def get(self, type, code=None, default=None, exact=False):  # @ReservedAssignment
        """
        returns a message from the list that kind of matches type and code

        case and types (str/int) are ignored

        If no suitable message is found, the default argument is returned
        If exact=True, the message has to match code and type exactly
        """
        if not exact:
            warn("The fuzzy mode (exact=False) is deprecated. Please use exact in future.",
                 DeprecationWarning)

        for msg in self:
            if exact:
                if msg.type == type and msg.code == code:
                    return msg
            else:
                if self._hash(msg.type, msg.code) == self._hash(type, code):
                    return msg

        return default

    def to_xml(self, add_to_element):
        """Append each message's XML element to `add_to_element`."""
        for m in self:
            melem = m.to_xml()
            add_to_element.append(melem)

    def __repr__(self):
        xml = etree.Element("Messages")
        self.to_xml(xml)
        return xml2str(xml)

    #######################################################################
    # can be removed after 01.07.24
    class _msg_key:

        def __init__(self, type, code):  # @ReservedAssignment
            warn("This class is deprecated.", DeprecationWarning)
            self._type = type
            self._code = code

        @staticmethod
        def get(msg):
            return Messages._msg_key(msg.type, msg.code)

        def __eq__(self, obj):
            return self.__hash__() == obj.__hash__()

        def __hash__(self):
            return hash(str(self._type).lower() + (str(",") + str(self._code)
                                                   if self._code is not None else ''))

        def __repr__(self):
            return str(self._type) + (str(",") + str(self._code)
                                      if self._code is not None else '')
    # end remove
    #######################################################################


class _Messages(Messages):
    """Deprecated alias of Messages; warns on construction."""

    def __init__(self, *args, **kwargs):
        warn("_Messages is deprecated. "
             "Use class Messages instead and beware of the slightly different API of the new"
             " Messages class", DeprecationWarning)
        super().__init__(*args, **kwargs)
- ''' - if e_local is None or e_remote is None: - return None - if e_local.role is None: - e_local.role = e_remote.role - elif e_remote.role is not None and not e_local.role.lower() == e_remote.role.lower(): - raise ValueError("The resulting entity had a different role ({0}) " - "than the local one ({1}). This probably means, that " - "the entity was intialized with a wrong class " - "by this client or it has changed in the past and " - "this client did't know about it yet.".format( - e_remote.role, e_local.role)) - - e_local.id = e_remote.id - e_local.name = e_remote.name - e_local.description = e_remote.description - e_local.path = e_remote.path - e_local._checksum = e_remote._checksum - e_local._size = e_remote._size - e_local.datatype = e_remote.datatype - e_local.unit = e_remote.unit - e_local.value = e_remote.value - e_local.properties = e_remote.properties - e_local.parents = e_remote.parents - e_local.messages = e_remote.messages - e_local.acl = e_remote.acl - e_local.permissions = e_remote.permissions - e_local.is_valid = e_remote.is_valid - e_local.is_deleted = e_remote.is_deleted - e_local.version = e_remote.version - e_local.state = e_remote.state - - if hasattr(e_remote, "query"): - e_local.query = e_remote.query - - if hasattr(e_remote, "affiliation"): - e_local.affiliation = e_remote.affiliation - - return e_local - - -def _deletion_sync(e_local, e_remote): - if e_local is None or e_remote is None: - return - - try: - e_remote.get_messages()["info", 10] # try and get the deletion info - except KeyError: - # deletion info wasn't there - e_local.messages = e_remote.messages - - return - - _basic_sync(e_local, e_remote) - e_local.is_valid = lambda: False - e_local.is_deleted = lambda: True - e_local.id = None - - -class Container(list): - """Container is a type safe list for Entities. 
- - It also provides several short-cuts for transactions like retrieval, - insertion, update, and deletion which are a applied to all entities - in the container or the whole container respectively. - """ - - _debug = staticmethod( - lambda: ( - get_config().getint( - "Container", - "debug") if get_config().has_section("Container") and - get_config().get( - "Container", - "debug") is not None else 0)) - - def is_valid(self): - for e in self: - if not e.is_valid(): - return False - - return True - - def __hash__(self): - return object.__hash__(self) - - def remove(self, entity): - """Remove the first entity from this container which is equal to the - given entity. Raise a ValueError if there is no such entity. - - Alternatively, if the argument is not an entity but an ID, the - contained entity with this ID is removed. - - @param entity: The entity to be removed. - """ - - if entity in self: - super().remove(entity) - else: - for ee in self: - if entity == ee.id: - super().remove(ee) - - return ee - raise ValueError( - "Container.remove(entity): entity not in Container") - - return entity - - def _get_entity_by_cuid(self, cuid): - ''' - Get the first entity which has the given cuid. - Note: this method is intended for internal use. - @param name: The cuid of the entity to be returned. - @return: Entity with the given cuid. - ''' - - for e in self: - if e._cuid is not None: - if str(e._cuid) == str(cuid): - return e - raise KeyError("No entity with such cuid (" + str(cuid) + ")!") - - def get_entity_by_id(self, id): # @ReservedAssignment - """Get the first entity which has the given id. Note: If several - entities are in this list which have the same id, this method will only - return the first and ignore the others. - - @param name: The id of the entity to be returned. - @return: Entity with the given id. 
- """ - - for e in self: - if e.id: - if e.id == int(id): - return e - raise KeyError("No entity with such id (" + str(id) + ")!") - - def get_all_errors(self): - """Returns a dictionary with all errors from all entities in this - container. - - The dictionary keys are the ids of those entities having - contained an error. - """ - error_list = dict() - - for e in self: - if isinstance(e, Entity): - el = e.get_errors_deep() - - if len(el) > 0: - error_list[str(e.id)] = el - - return error_list - - def get_entity_by_name(self, name, case_sensitive=True): - """Get the first entity which has the given name. Note: If several - entities are in this list which have the same name, this method will - only return the first and ignore the others. - - @param name: the name of the entity to be returned. - @param case_sensitive (True/False): Do a case-sensitive search for name (or not). - @return: Entity with the given name. - """ - - for e in self: - if e.name is not None: - if case_sensitive and e.name == str(name): - return e - elif not case_sensitive and e.name.lower() == str(name).lower(): - return e - raise KeyError("No entity with such name (" + str(name) + ")!") - - def __init__(self): - """Container is a list of entities which can be - inserted/updated/deleted/retrieved at once.""" - list.__init__(self) - self._timestamp = None - self._srid = None - self.messages = Messages() - - def extend(self, entities): - """Extend this Container by appending all single entities in the given - list of entities. - - @param entities: list of entities. 
- """ - - if isinstance(entities, Container): - for entity in entities: - self.append(entity) - elif isinstance(entities, (list, set)): - for entity in entities: - self.extend(entity) - elif isinstance(entities, Entity): - self.append(entities) - elif isinstance(entities, int): - self.append(entities) - elif hasattr(entities, "encode"): - self.append(entities) - else: - raise TypeError( - "Expected a list or a container (was " + str(type(entities)) + ").") - - return self - - def append(self, entity): - """Append an entity container. - - If the parameter is an integer an entity with the corresponding ID is appended. - If the parameter is a string an entity with the corresponding name is appended. - Raise a TypeError if the entity is not a sub type of the correct class (as defined - via the constructor). - - @param entity: The entity to be appended. - """ - - if isinstance(entity, Entity): - super().append(entity) - elif isinstance(entity, int): - super().append(Entity(id=entity)) - elif hasattr(entity, "encode"): - super().append(Entity(name=entity)) - elif isinstance(entity, QueryTemplate): - super().append(entity) - else: - warn("Entity was neither an id nor a name nor an entity." + - " (was " + str(type(entity)) + ":\n" + str(entity) + ")") - # raise TypeError( - # "Entity was neither an id nor a name nor an entity." + - # " (was " + str(type(entity)) + "\n" + str(entity) + ")") - - return self - - def to_xml(self, add_to_element=None, local_serialization=False): - """Get an xml tree representing this Container or append all entities - to the given xml element. - - @param add_to_element=None: optional element to which all entities of this container is to be appended. - @return xml element - """ - tmpid = 0 - - # users might already have specified some tmpids. -> look for smallest. 
- - for e in self: - tmpid = min(tmpid, Container._get_smallest_tmpid(e)) - tmpid -= 1 - - if add_to_element is None: - add_to_element = etree.Element("Entities") - - for m in self.messages: - add_to_element.append(m.to_xml()) - - for e in self: - if e.id is None: - e.id = tmpid - tmpid -= 1 - - for e in self: - if isinstance(e, File): - elem = e.to_xml(local_serialization=local_serialization) - else: - elem = e.to_xml() - add_to_element.append(elem) - - return add_to_element - - def get_errors(self): - """Get all error messages of this container. - - @return Messages: Error messages. - """ - - if self.has_errors(): - ret = Messages() - - for m in self.messages: - if m.type.lower() == "error": - ret.append(m) - - return ret - else: - return None - - def get_warnings(self): - """Get all warning messages of this container. - - @return Messages: Warning messages. - """ - - if self.has_warnings(): - ret = Messages() - - for m in self.messages: - if m.type.lower() == "warning": - ret.append(m) - - return ret - else: - return None - - def get_all_messages(self): - ret = Messages() - - for e in self: - ret.extend(e.get_all_messages()) - - return ret - - def add_message(self, m): - self.messages.append(m) - - return self - - def has_warnings(self): - ''' - @return True: if and only if this container has any warning messages. - ''' - - for m in self.messages: - if m.type.lower() == "warning": - return True - - return False - - def has_errors(self): - ''' - @return True: if and only if this container has any error messages. - ''' - - for m in self.messages: - if m.type.lower() == "error": - return True - - return False - - def __str__(self): - return self.__repr__() - - def __repr__(self): - return xml2str(self.to_xml()) - - @staticmethod - def from_xml(xml_str): - """Creates a Container from the given xml string. - - @return The created Container. 
- """ - - c = Container() - xml = etree.fromstring(xml_str) - - for element in xml: - e = _parse_single_xml_element(element) - c.append(e) - - return c - - @staticmethod - def _response_to_entities(http_response): - """Parse the response of a Http-request. - - Note: Method is intended for the internal use. - """ - body = http_response.read() - _log_response(body) - - xml = etree.fromstring(body) - - if xml.tag.lower() == "response": - c = Container() - - for child in xml: - e = _parse_single_xml_element(child) - - if isinstance(e, Message): - c.messages.append(e) - elif isinstance(e, Query): - c.query = e - - if e.messages is not None: - c.messages.extend(e.messages) - elif isinstance(e, (Entity, QueryTemplate)): - e.is_deleted = lambda: False - - if e.has_errors() is True: - e.is_valid = lambda: False - elif e.id is None or e.id < 0: - e.is_valid = lambda: False - else: - e.is_valid = lambda: True - c.append(e) - else: - # ignore - pass - c._timestamp = xml.get("timestamp") - c._srid = xml.get("srid") - - return c - else: - raise CaosDBException( - "The server's response didn't contain the expected elements. The configuration of this client might be invalid (especially the url).") - - def _sync(self, container, unique, raise_exception_on_error, - name_case_sensitive=False, strategy=_basic_sync): - """Synchronize this container (C1) with another container (C2). - - That is: 1) Synchronize any entity e1 in C1 with the - corresponding entity e2 from C2 via e1._sync(c2). 2) Add any - leftover entity from C2 to C1. - """ - # TODO: This method is extremely slow. E.g. 30 seconds for 1000 - # entities. 
- - sync_dict = self._calc_sync_dict( - remote_container=container, - unique=unique, - raise_exception_on_error=raise_exception_on_error, - name_case_sensitive=name_case_sensitive) - - # sync every entity in this container - - for entity in self: - try: - e_sync = sync_dict[entity] - - if e_sync is not None: - strategy(entity, e_sync.pop()) - - for e in e_sync: - self.append(e) - except KeyError: - pass - - # add leftover entities - try: - if sync_dict[self] is not None: - for e in sync_dict[self]: - self.append(e) - except KeyError: - pass - - # messages: - - for m in container.messages: - self.add_message(m) - - self._timestamp = container._timestamp - self._srid = container._srid - - def _calc_sync_dict(self, remote_container, unique, - raise_exception_on_error, name_case_sensitive): - # self is local, remote_container is remote. - - # which is to be synced with which: - # sync_dict[local_entity]=sync_remote_enities - sync_dict = dict() - - # list of remote entities which already have a local equivalent - used_remote_entities = [] - - # match by cuid - - for local_entity in self: - - sync_dict[local_entity] = None - - if local_entity._cuid is not None: - # a list of remote entities which are equivalents of - # local_entity - sync_remote_entities = [] - - for remote_entity in remote_container: - if remote_entity._cuid is not None and str(remote_entity._cuid) == str( - local_entity._cuid) and remote_entity not in used_remote_entities: - sync_remote_entities.append(remote_entity) - used_remote_entities.append(remote_entity) - - if len(sync_remote_entities) > 0: - sync_dict[local_entity] = sync_remote_entities - - if unique and len(sync_remote_entities) > 1: - msg = "Request was not unique. CUID " + \ - str(local_entity._cuid) + " was found " + \ - str(len(sync_remote_entities)) + " times." 
- local_entity.add_message(Message(description=msg, type="Error")) - - if raise_exception_on_error: - raise MismatchingEntitiesError(msg) - - # match by id - - for local_entity in self: - if sync_dict[local_entity] is None and local_entity.id is not None: - sync_remote_entities = [] - - for remote_entity in remote_container: - if (remote_entity.id is not None - and remote_entity.id == local_entity.id - and remote_entity not in used_remote_entities): - sync_remote_entities.append(remote_entity) - used_remote_entities.append(remote_entity) - - if len(sync_remote_entities) > 0: - sync_dict[local_entity] = sync_remote_entities - - if unique and len(sync_remote_entities) > 1: - msg = "Request was not unique. ID " + \ - str(local_entity.id) + " was found " + \ - str(len(sync_remote_entities)) + " times." - local_entity.add_message(Message(description=msg, type="Error")) - - if raise_exception_on_error: - raise MismatchingEntitiesError(msg) - - # match by path - - for local_entity in self: - if (sync_dict[local_entity] is None - and local_entity.path is not None): - sync_remote_entities = [] - - for remote_entity in remote_container: - if (remote_entity.path is not None - and str(remote_entity.path) == ( - local_entity.path - - if local_entity.path.startswith("/") else "/" + - local_entity.path) - and remote_entity not in used_remote_entities): - sync_remote_entities.append(remote_entity) - used_remote_entities.append(remote_entity) - - if len(sync_remote_entities) > 0: - sync_dict[local_entity] = sync_remote_entities - - if unique and len(sync_remote_entities) > 1: - msg = "Request was not unique. Path " + \ - str(local_entity.path) + " was found " + \ - str(len(sync_remote_entities)) + " times." 
- local_entity.add_message(Message(description=msg, type="Error")) - - if raise_exception_on_error: - raise MismatchingEntitiesError(msg) - - # match by name - - for local_entity in self: - if (sync_dict[local_entity] is None - and local_entity.name is not None): - sync_remote_entities = [] - - for remote_entity in remote_container: - if (remote_entity.name is not None - and (str(remote_entity.name) == str(local_entity.name) - or - (name_case_sensitive is False and - str(remote_entity.name).lower() == str( - local_entity.name).lower())) - and remote_entity not in used_remote_entities): - sync_remote_entities.append(remote_entity) - used_remote_entities.append(remote_entity) - - if len(sync_remote_entities) > 0: - sync_dict[local_entity] = sync_remote_entities - - if unique and len(sync_remote_entities) > 1: - msg = "Request was not unique. Name " + \ - str(local_entity.name) + " was found " + \ - str(len(sync_remote_entities)) + " times." - local_entity.add_message(Message(description=msg, type="Error")) - - if raise_exception_on_error: - raise MismatchingEntitiesError(msg) - - # add remaining entities to this remote_container - sync_remote_entities = [] - - for remote_entity in remote_container: - if not (remote_entity in used_remote_entities): - sync_remote_entities.append(remote_entity) - - if len(sync_remote_entities) > 0: - sync_dict[self] = sync_remote_entities - - if unique and len(sync_remote_entities) != 0: - msg = "Request was not unique. There are " + \ - str(len(sync_remote_entities)) + \ - " entities which could not be matched to one of the requested ones." - remote_container.add_message(Message(description=msg, type="Error")) - - if raise_exception_on_error: - raise MismatchingEntitiesError(msg) - - return sync_dict - - def _test_dependencies_in_container(self, container): - """This function returns those elements of a given container that are a dependency of another element of the same container. 
- - Args: - container (Container): a caosdb container - - Returns: - [set]: a set of unique elements that are a dependency of another element of `container` - """ - item_id = set() - is_parent = set() - is_property = set() - is_being_referenced = set() - dependent_parents = set() - dependent_properties = set() - dependent_references = set() - dependencies = set() - - for container_item in container: - item_id.add(container_item.id) - - for parents in container_item.get_parents(): - is_parent.add(parents.id) - - for references in container_item.get_properties(): - if is_reference(references.datatype): - # add only if it is a reference, not a property - - if references.value is None: - continue - elif isinstance(references.value, int): - is_being_referenced.add(references.value) - elif is_list_datatype(references.datatype): - for list_item in references.value: - if isinstance(list_item, int): - is_being_referenced.add(list_item) - else: - is_being_referenced.add(list_item.id) - else: - try: - is_being_referenced.add(references.value.id) - except AttributeError: - pass - - if hasattr(references, 'id'): - is_property.add(references.id) - - dependent_parents = item_id.intersection(is_parent) - dependent_properties = item_id.intersection(is_property) - dependent_references = item_id.intersection(is_being_referenced) - dependencies = dependent_parents.union(dependent_references) - dependencies = dependencies.union(dependent_properties) - - return dependencies - - def delete(self, raise_exception_on_error=True, flags=None, chunk_size=100): - """Delete all entities in this container. - - Entities are identified via their id if present and via their - name otherwise. If any entity has no id and no name a - TransactionError will be raised. - - Note: If only a name is given this could lead to ambiguities. If - this happens, none of them will be deleted. It occurs an error - instead. 
- """ - item_count = len(self) - # Split Container in 'chunk_size'-sized containers (if necessary) to avoid error 414 Request-URI Too Long - - if item_count > chunk_size: - dependencies = self._test_dependencies_in_container(self) - ''' - If there are as many dependencies as entities in the container and it is larger than chunk_size it cannot be split and deleted. - This case cannot be handled at the moment. - ''' - - if len(dependencies) == item_count: - if raise_exception_on_error: - te = TransactionError( - msg="The container is too large and with too many dependencies within to be deleted.", - container=self) - raise te - - return self - - # items which have to be deleted later because of dependencies. - dependencies_delete = Container() - - for i in range(0, int(item_count/chunk_size)+1): - chunk = Container() - - for j in range(i*chunk_size, min(item_count, (i+1)*chunk_size)): - if len(dependencies): - if self[j].id in dependencies: - dependencies_delete.append(self[j]) - else: - chunk.append(self[j]) - else: - chunk.append(self[j]) - - if len(chunk): - chunk.delete() - if len(dependencies_delete): - dependencies_delete.delete() - - return self - - if len(self) == 0: - if raise_exception_on_error: - te = TransactionError( - msg="There are no entities to be deleted. This container is empty.", - container=self) - raise te - - return self - self.clear_server_messages() - - c = get_connection() - id_str = [] - - for entity in self: - if entity.is_deleted(): - continue - entity._cuid = None - - if entity.id is not None: - id_str.append(str(entity.id)) - elif entity.name is not None: - id_str.append(str(entity.name)) - else: - entity.add_message( - Message( - type="Error", - description="This entity has no identifier. It cannot be deleted.")) - - if raise_exception_on_error: - ee = EntityError( - "This entity has no identifier. 
It cannot be deleted.", entity) - raise TransactionError(ee) - entity.is_valid = lambda: False - - if len(id_str) == 0: - if raise_exception_on_error: - te = TransactionError( - msg="There are no entities to be deleted.", - container=self) - raise te - - return self - entity_url_segments = [_ENTITY_URI_SEGMENT, "&".join(id_str)] - - _log_request("DELETE: " + str(entity_url_segments) + - ("?" + str(flags) if flags is not None else '')) - - http_response = c.delete(entity_url_segments, query_dict=flags) - cresp = Container._response_to_entities(http_response) - self._sync(cresp, raise_exception_on_error=raise_exception_on_error, - unique=True, strategy=_deletion_sync) - - if raise_exception_on_error: - raise_errors(self) - - return self - - def retrieve(self, query=None, unique=True, - raise_exception_on_error=True, sync=True, flags=None): - """Retrieve all entities in this container identified via their id if - present and via their name otherwise. Any locally already existing - attributes (name, description, ...) will be preserved. Any such - properties and parents will be synchronized as well. They will not be - overridden. This method returns a Container containing the this entity. - - If any entity has no id and no name a CaosDBException will be raised. - - Note: If only a name is given this could lead to ambiguities. All entities with the name in question - will be returned. Therefore, the container could contain more elements after the retrieval than - before. - """ - - if isinstance(query, list): - self.extend(query) - query = None - cresp = Container() - entities_str = [] - - if query is None: - for entity in self: - if entity.id is not None and entity.id < 0: - entity.id = None - entity.clear_server_messages() - - if entity.id is not None: - entities_str.append(str(entity.id)) - elif entity.name is not None: - entities_str.append(str(entity.name)) - elif entity.path is not None: - # fetch by path (files only) - cresp.extend(execute_query( - "FIND FILE . 
STORED AT \"" + str(entity.path) + "\"", unique=False)) - else: - entity.add_message( - Message( - type="Error", - description="This entity has no identifier. It cannot be retrieved.")) - - if raise_exception_on_error: - ee = EntityError( - "This entity has no identifier. It cannot be retrieved.", - entity) - raise TransactionError(ee) - entity.is_valid = lambda: False - else: - entities_str.append(str(query)) - - self.clear_server_messages() - cresp2 = self._retrieve(entities=entities_str, flags=flags) - cresp.extend(cresp2) - cresp.messages.extend(cresp2.messages) - - if raise_exception_on_error: - raise_errors(cresp) - - if sync: - self._sync(cresp, unique=unique, - raise_exception_on_error=raise_exception_on_error) - - return self - else: - return cresp - - @staticmethod - def _split_uri_string(entities): - - # get half length of entities_str - hl = len(entities) // 2 - - # split in two uris - - return (entities[0:hl], entities[hl:len(entities)]) - - def _retrieve(self, entities, flags): - c = get_connection() - try: - _log_request("GET: " + _ENTITY_URI_SEGMENT + str(entities) + - ('' if flags is None else "?" 
+ str(flags))) - http_response = c.retrieve( - entity_uri_segments=[ - _ENTITY_URI_SEGMENT, str( - "&".join(entities))], query_dict=flags) - - return Container._response_to_entities(http_response) - except HTTPURITooLongError as uri_e: - try: - # split up - uri1, uri2 = Container._split_uri_string(entities) - except ValueError as val_e: - raise uri_e from val_e - c1 = self._retrieve(entities=uri1, flags=flags) - c2 = self._retrieve(entities=uri2, flags=flags) - c1.extend(c2) - c1.messages.extend(c2.messages) - - return c1 - - def clear_server_messages(self): - self.messages.clear_server_messages() - - for entity in self: - entity.clear_server_messages() - - return self - - @staticmethod - def _dir_to_http_parts(root, d, upload): # @ReservedAssignment - ret = [] - x = (root + '/' + d if d is not None else root) - - for f in listdir(x): - if isdir(x + '/' + f): - part = MultipartParam( - name=hex(randint(0, sys.maxsize)), value="") - part.filename = upload + \ - ('/' + d + '/' if d is not None else '/') + f + '/' - ret.extend(Container._dir_to_http_parts( - root, (d + '/' + f if d is not None else f), upload)) - else: - part = MultipartParam.from_file( - paramname=hex(randint(0, sys.maxsize)), filename=x + '/' + f) - part.filename = upload + \ - ('/' + d + '/' if d is not None else '/') + f - ret.append(part) - - return ret - - def update(self, strict=False, raise_exception_on_error=True, - unique=True, sync=True, flags=None): - """Update these entites.""" - - if len(self) < 1: - te = TransactionError( - msg="There are no entities to be updated. 
This container is empty.", - container=self) - raise te - - self.clear_server_messages() - insert_xml = etree.Element("Update") - http_parts = [] - - if flags is None: - flags = {} - - if strict is True: - flags["strict"] = "true" - - if unique is True: - flags["uniquename"] = "true" - - for entity in self: - if (entity.id is None or entity.id < 0): - ee = EntityError( - "You tried to update an entity without a valid id.", - entity) - raise TransactionError(ee) - - self._linearize() - - for entity in self: - - # process files if present - Container._process_file_if_present_and_add_to_http_parts( - http_parts, entity) - - for entity in self: - entity_xml = entity.to_xml() - - if hasattr(entity, '_upload') and entity._upload is not None: - entity_xml.set("upload", entity._upload) - elif hasattr(entity, 'pickup') and entity.pickup is not None: - entity_xml.set("pickup", entity.pickup) - - insert_xml.append(entity_xml) - - _log_request("PUT: " + _ENTITY_URI_SEGMENT + - ('' if flags is None else "?" 
+ str(flags)), insert_xml) - - con = get_connection() - - if http_parts is not None and len(http_parts) > 0: - http_parts.insert( - 0, MultipartParam("FileRepresentation", xml2str(insert_xml))) - body, headers = multipart_encode(http_parts) - - http_response = con.update( - entity_uri_segment=[_ENTITY_URI_SEGMENT], - query_dict=flags, - body=body, - headers=headers) - else: - http_response = con.update( - entity_uri_segment=[_ENTITY_URI_SEGMENT], query_dict=flags, - body=xml2str(insert_xml)) - - cresp = Container._response_to_entities(http_response) - - if raise_exception_on_error: - raise_errors(cresp) - - if sync: - self._sync(cresp, unique=unique, - raise_exception_on_error=raise_exception_on_error) - - return self - else: - return cresp - - @staticmethod - def _process_file_if_present_and_add_to_http_parts(http_parts, entity): - if isinstance(entity, File) and hasattr( - entity, 'file') and entity.file is not None: - new_checksum = File._get_checksum(entity.file) - - # do not transfer unchanged files. 
- - if entity._checksum is not None and entity._checksum.lower() == new_checksum.lower(): - entity._upload = None - - return - - entity._size = None - entity._checksum = new_checksum - entity._upload = hex(randint(0, sys.maxsize)) - - if hasattr(entity.file, "name"): - _file = entity.file.name - else: - _file = entity.file - - if isdir(_file): - http_parts.extend( - Container._dir_to_http_parts(_file, None, entity._upload)) - part = MultipartParam( - name=hex(randint(0, sys.maxsize)), value="") - part.filename = entity._upload + '/' - else: - part = MultipartParam.from_file( - paramname=hex(randint(0, sys.maxsize)), filename=_file) - part.filename = entity._upload - http_parts.append(part) - - if entity.thumbnail is not None: - part = MultipartParam.from_file(paramname=hex( - randint(0, sys.maxsize)), filename=entity.thumbnail) - part.filename = entity._upload + ".thumbnail" - http_parts.append(part) - else: - entity._checksum = None - - def insert(self, strict=False, raise_exception_on_error=True, - unique=True, sync=True, flags=None): - """Insert this file entity into CaosDB. A successful insertion will - generate a new persistent ID for this entity. This entity can be - identified, retrieved, updated, and deleted via this ID until it has - been deleted. - - If the insertion fails, a CaosDBException will be raised. The server will have returned at - least one error-message describing the reason why it failed in that case (call - <this_entity>.get_all_messages() in order to get these error-messages). - - Some insertions might cause warning-messages on the server-side, but the entities are inserted - anyway. Set the flag 'strict' to True in order to force the server to take all warnings as errors. - This prevents the server from inserting this entity if any warning occurs. - - @param strict=False: Flag for strict mode. - @param sync=True: synchronize this container with the response from the server. 
Otherwise, - this method returns a new container with the inserted entities and leaves this container untouched. - """ - - self.clear_server_messages() - insert_xml = etree.Element("Insert") - http_parts = [] - - if flags is None: - flags = {} - - if strict: - flags["strict"] = "true" - - if unique: - flags["uniquename"] = "true" - - self._linearize() - - # TODO: This is a possible solution for ticket#137 -# retrieved = Container() -# for entity in self: -# if entity.is_valid(): -# retrieved.append(entity) -# if len(retrieved)>0: -# retrieved = retrieved.retrieve(raise_exception_on_error=False, sync=False) -# for e_remote in retrieved: -# if e_remote.id is not None: -# try: -# self.get_entity_by_id(e_remote.id).is_valid=e_remote.is_valid -# continue -# except KeyError: -# pass -# if e_remote.name is not None: -# try: -# self.get_entity_by_name(e_remote.name).is_valid=e_remote.is_valid -# continue -# except KeyError: -# pass - for entity in self: - if entity.is_valid(): - continue - - # process files if present - Container._process_file_if_present_and_add_to_http_parts( - http_parts, entity) - - for entity in self: - if entity.is_valid(): - continue - entity_xml = entity.to_xml() - - if hasattr(entity, '_upload') and entity._upload is not None: - entity_xml.set("upload", entity._upload) - elif hasattr(entity, 'pickup') and entity.pickup is not None: - entity_xml.set("pickup", entity.pickup) - insert_xml.append(entity_xml) - - if len(self) > 0 and len(insert_xml) < 1: - te = TransactionError( - msg="There are no entities to be inserted. This container contains existent entities only.", - container=self) - raise te - _log_request("POST: " + _ENTITY_URI_SEGMENT + - ('' if flags is None else "?" 
+ str(flags)), insert_xml) - - con = get_connection() - - if http_parts is not None and len(http_parts) > 0: - http_parts.insert( - 0, MultipartParam("FileRepresentation", xml2str(insert_xml))) - - body, headers = multipart_encode(http_parts) - http_response = con.insert( - entity_uri_segment=[_ENTITY_URI_SEGMENT], - body=body, - headers=headers, - query_dict=flags) - else: - http_response = con.insert( - entity_uri_segment=[_ENTITY_URI_SEGMENT], - body=xml2str(insert_xml), - query_dict=flags) - - cresp = Container._response_to_entities(http_response) - - if sync: - - self._sync(cresp, unique=unique, - raise_exception_on_error=raise_exception_on_error) - - if raise_exception_on_error: - raise_errors(self) - - return self - else: - if raise_exception_on_error: - raise_errors(cresp) - - return cresp - - @staticmethod - def _get_smallest_tmpid(entity): - tmpid = 0 - - if entity.id is not None: - tmpid = min(tmpid, int(entity.id)) - - for p in entity.get_parents(): - if p.id is not None: - tmpid = min(tmpid, int(p.id)) - - for p in entity.get_properties(): - if p.id is not None: - tmpid = min(tmpid, Container._get_smallest_tmpid(p)) - - return tmpid - - def _linearize(self): - tmpid = 0 - ''' users might already have specified some tmpids. 
-> look for smallest.''' - - for e in self: - tmpid = min(tmpid, Container._get_smallest_tmpid(e)) - - tmpid -= 1 - - '''a tmpid for every entity''' - - for e in self: - if e.id is None: - e.id = tmpid - tmpid -= 1 - - # CUID - - if e._cuid is None or e._cuid == 'None' or e._cuid == '': - e._cuid = str(e.id) + "--" + str(uuid()) - - '''dereference properties and parents''' - - for e in self: - """properties.""" - - for p in e.get_properties(): - if p.id is None: - if p.name is not None: - # TODO using try except for normal execution flow is bad style - try: - w = self.get_entity_by_name(p.name) - p._wrap(w) - except KeyError: - pass - - '''parents''' - - for p in e.get_parents(): - if p.id is None: - if p.name is not None: - # TODO using try except for normal execution flow is bad style - try: - p._wrap(self.get_entity_by_name(p.name)) - except KeyError: - pass - - return self - - def get_property_values(self, *selectors): - """ Return a list of tuples with values of the given selectors. - - I.e. a tabular representation of the container's content. - - If the elements of the selectors parameter are tuples, they will return - the properties of the referenced entity, if present. E.g. ("window", - "height") will return the value of the height property of the - referenced window entity. - - All tuples of the returned list have the same length as the selectors - parameter and the ordering of the tuple's values correspond to the - order of the parameter as well. - - The tuple contains None for all values that are not available in the - entity. That does not necessarily mean, that the values are not stored - in the database (e.g. if a single entity was retrieved without - referenced entities). - - Parameters - ---------- - *selectors : str or tuple of str - Each selector is a list or tuple of property names, e.g. `"height", - "width"`. - - Returns - ------- - table : list of tuples - A tabular representation of the container's content. 
- """ - table = [] - - for e in self: - table.append(e.get_property_values(*selectors)) - - return table - - -def sync_global_acl(): - c = get_connection() - http_response = c.retrieve(entity_uri_segments=["EntityPermissions"]) - body = http_response.read() - _log_response(body) - - xml = etree.fromstring(body) - - if xml.tag.lower() == "response": - for child in xml: - if child.tag == "EntityPermissions": - Permissions.known_permissions = Permissions(child) - - for pelem in child: - if pelem.tag == "EntityACL": - ACL.global_acl = ACL(xml=pelem) - else: - raise CaosDBException( - "The server's response didn't contain the expected elements. The configuration of this client might be invalid (especially the url).") - - -def get_known_permissions(): - if Permissions.known_permissions is None: - sync_global_acl() - - return Permissions.known_permissions - - -def get_global_acl(): - if ACL.global_acl is None: - sync_global_acl() - - return ACL.global_acl - - -class ACI(): - def __init__(self, realm, username, role, permission): - self.role = role - self.username = username - self.realm = realm - self.permission = permission - - def __hash__(self): - return hash(self.__repr__()) - - def __eq__(self, other): - return isinstance(other, ACI) and (self.role is None and self.username == other.username and self.realm == - other.realm) or self.role == other.role and self.permission == other.permission - - def __repr__(self): - return str(self.realm) + ":" + str(self.username) + ":" + str(self.role) + ":" + str(self.permission) - - def add_to_element(self, e): - if self.role is not None: - e.set("role", self.role) - else: - e.set("username", self.username) - - if self.realm is not None: - e.set("realm", self.realm) - p = etree.Element("Permission") - p.set("name", self.permission) - e.append(p) - - -class ACL(): - - global_acl = None - - def __init__(self, xml=None): - if xml is not None: - self.parse_xml(xml) - else: - self.clear() - - def parse_xml(self, xml): - """Clear this 
ACL and parse the xml. - - Iterate over the rules in the xml and add each rule to this ACL. - - Contradicting rules will both be kept. - - Parameters - ---------- - xml : lxml.etree.Element - The xml element containing the ACL rules, i.e. <Grant> and <Deny> - rules. - """ - self.clear() - self._parse_xml(xml) - - def _parse_xml(self, xml): - """Parse the xml. - - Iterate over the rules in the xml and add each rule to this ACL. - - Contradicting rules will both be kept. - - Parameters - ---------- - xml : lxml.etree.Element - The xml element containing the ACL rules, i.e. <Grant> and <Deny> - rules. - """ - # @review Florian Spreckelsen 2022-03-17 - for e in xml: - role = e.get("role") - username = e.get("username") - realm = e.get("realm") - priority = e.get("priority") - - for p in e: - if p.tag == "Permission": - permission = p.get("name") - - if e.tag == "Grant": - self.grant(username=username, realm=realm, role=role, - permission=permission, priority=priority, - revoke_denial=False) - elif e.tag == "Deny": - self.deny(username=username, realm=realm, role=role, - permission=permission, priority=priority, - revoke_grant=False) - - def combine(self, other): - """ Combine and return new instance.""" - result = ACL() - result._grants.update(other._grants) - result._grants.update(self._grants) - result._denials.update(other._denials) - result._denials.update(self._denials) - result._priority_grants.update(other._priority_grants) - result._priority_grants.update(self._priority_grants) - result._priority_denials.update(other._priority_denials) - result._priority_denials.update(self._priority_denials) - - return result - - def __eq__(self, other): - return isinstance(other, ACL) and other._grants == self._grants and self._denials == other._denials and self._priority_grants == other._priority_grants and self._priority_denials == other._priority_denials - - def is_empty(self): - return len(self._grants) + len(self._priority_grants) + \ - len(self._priority_denials) + 
len(self._denials) == 0 - - def clear(self): - self._grants = set() - self._denials = set() - self._priority_grants = set() - self._priority_denials = set() - - def _get_boolean_priority(self, priority): - return str(priority).lower() in ["true", "1", "yes", "y"] - - def _remove_item(self, item, priority): - try: - self._denials.remove(item) - except KeyError: - pass - try: - self._grants.remove(item) - except KeyError: - pass - - if priority: - try: - self._priority_denials.remove(item) - except KeyError: - pass - try: - self._priority_grants.remove(item) - except KeyError: - pass - - def revoke_grant(self, username=None, realm=None, - role=None, permission=None, priority=False): - priority = self._get_boolean_priority(priority) - item = ACI(role=role, username=username, - realm=realm, permission=permission) - - if priority: - if item in self._priority_grants: - self._priority_grants.remove(item) - - if item in self._grants: - self._grants.remove(item) - - def revoke_denial(self, username=None, realm=None, - role=None, permission=None, priority=False): - priority = self._get_boolean_priority(priority) - item = ACI(role=role, username=username, - realm=realm, permission=permission) - - if priority: - if item in self._priority_denials: - self._priority_denials.remove(item) - - if item in self._denials: - self._denials.remove(item) - - def grant(self, permission, username=None, realm=None, role=None, - priority=False, revoke_denial=True): - """Grant a permission to a user or role. - - You must specify either only the username and the realm, or only the - role. - - By default a previously existing denial rule would be revoked, because - otherwise this grant wouldn't have any effect. However, for keeping - contradicting rules pass revoke_denial=False. - - Parameters - ---------- - permission: str - The permission to be granted. - username : str, optional - The username. Exactly one is required, either the `username` or the - `role`. 
- realm: str, optional - The user's realm. Required when username is not None. - role: str, optional - The role (as in Role-Based Access Control). Exactly one is - required, either the `username` or the `role`. - priority: bool, default False - Whether this permission is granted with priority over non-priority - rules. - revoke_denial: bool, default True - Whether a contradicting denial (with same priority flag) in this - ACL will be revoked. - """ - # @review Florian Spreckelsen 2022-03-17 - priority = self._get_boolean_priority(priority) - item = ACI(role=role, username=username, - realm=realm, permission=permission) - if revoke_denial: - self._remove_item(item, priority) - - if priority is True: - self._priority_grants.add(item) - else: - self._grants.add(item) - - def deny(self, username=None, realm=None, role=None, - permission=None, priority=False, revoke_grant=True): - """Deny a permission to a user or role for this entity. - - You must specify either only the username and the realm, or only the - role. - - By default a previously existing grant rule would be revoked, because - otherwise this denial would override the grant rules anyways. However, - for keeping contradicting rules pass revoke_grant=False. - - Parameters - ---------- - permission: str - The permission to be denied. - username : str, optional - The username. Exactly one is required, either the `username` or the - `role`. - realm: str, optional - The user's realm. Required when username is not None. - role: str, optional - The role (as in Role-Based Access Control). Exactly one is - required, either the `username` or the `role`. - priority: bool, default False - Whether this permission is denied with priority over non-priority - rules. - revoke_grant: bool, default True - Whether a contradicting grant (with same priority flag) in this - ACL will be revoked. 
- """ - # @review Florian Spreckelsen 2022-03-17 - priority = self._get_boolean_priority(priority) - item = ACI(role=role, username=username, - realm=realm, permission=permission) - if revoke_grant: - self._remove_item(item, priority) - - if priority is True: - self._priority_denials.add(item) - else: - self._denials.add(item) - - def to_xml(self, xml=None): - if xml is None: - xml = etree.Element("EntityACL") - - for aci in self._grants: - e = etree.Element("Grant") - e.set("priority", "False") - aci.add_to_element(e) - xml.append(e) - - for aci in self._denials: - e = etree.Element("Deny") - e.set("priority", "False") - aci.add_to_element(e) - xml.append(e) - - for aci in self._priority_grants: - e = etree.Element("Grant") - e.set("priority", "True") - aci.add_to_element(e) - xml.append(e) - - for aci in self._priority_denials: - e = etree.Element("Deny") - e.set("priority", "True") - aci.add_to_element(e) - xml.append(e) - - return xml - - def get_acl_for_role(self, role): - ret = ACL() - - for aci in self._grants: - if aci.role == role: - ret._grants.add(aci) - - for aci in self._denials: - if aci.role == role: - ret._denials.add(aci) - - for aci in self._priority_grants: - if aci.role == role: - ret._priority_grants.add(aci) - - for aci in self._priority_denials: - if aci.role == role: - ret._priority_denials.add(aci) - - return ret - - def get_acl_for_user(self, username, realm=None): - ret = ACL() - - for aci in self._grants: - if aci.username == username and ( - realm is None or aci.realm == realm): - ret._grants.add(aci) - - for aci in self._denials: - if aci.username == username and ( - realm is None or aci.realm == realm): - ret._denials.add(aci) - - for aci in self._priority_grants: - if aci.username == username and ( - realm is None or aci.realm == realm): - ret._priority_grants.add(aci) - - for aci in self._priority_denials: - if aci.username == username and ( - realm is None or aci.realm == realm): - ret._priority_denials.add(aci) - - return ret - - 
def get_permissions_for_user(self, username, realm=None): - acl = self.get_acl_for_user(username, realm) - _grants = set() - - for aci in acl._grants: - _grants.add(aci.permission) - _denials = set() - - for aci in acl._denials: - _denials.add(aci.permission) - _priority_grants = set() - - for aci in acl._priority_grants: - _priority_grants.add(aci.permission) - _priority_denials = set() - - for aci in acl._priority_denials: - _priority_denials.add(aci.permission) - - return ((_grants - _denials) | _priority_grants) - _priority_denials - - def get_permissions_for_role(self, role): - acl = self.get_acl_for_role(role) - _grants = set() - - for aci in acl._grants: - _grants.add(aci.permission) - _denials = set() - - for aci in acl._denials: - _denials.add(aci.permission) - _priority_grants = set() - - for aci in acl._priority_grants: - _priority_grants.add(aci.permission) - _priority_denials = set() - - for aci in acl._priority_denials: - _priority_denials.add(aci.permission) - - return ((_grants - _denials) | _priority_grants) - _priority_denials - - def is_permitted(self, role, permission): - return permission in self.get_permissions_for_role(role) - - def __repr__(self): - return xml2str(self.to_xml()) - - -class Query(): - """Query - - Attributes - ---------- - q : str - The query string. - flags : dict of str - A dictionary of flags to be send with the query request. - messages : Messages() - A container of messages included in the last query response. - cached : bool - indicates whether the server used the query cache for the execution of - this query. - results : int or Container - The number of results (when this was a count query) or the container - with the resulting entities. 
- """ - - def putFlag(self, key, value=None): - self.flags[key] = value - - return self - - def removeFlag(self, key): - return self.flags.pop(key) - - def getFlag(self, key): - return self.flags.get(key) - - def __init__(self, q): - self.flags = dict() - self.messages = Messages() - self.cached = None - self.etag = None - - if isinstance(q, etree._Element): - self.q = q.get("string") - self.results = int(q.get("results")) - - if q.get("cached") is None: - self.cached = False - else: - self.cached = q.get("cached").lower() == "true" - self.etag = q.get("etag") - - for m in q: - if m.tag.lower() == 'warning' or m.tag.lower() == 'error': - self.messages.append(_parse_single_xml_element(m)) - else: - self.q = q - - def _query_request(self, query_dict): - """Used internally to execute the query request...""" - _log_request("GET Entity?" + str(query_dict), None) - connection = get_connection() - http_response = connection.retrieve( - entity_uri_segments=["Entity"], - query_dict=query_dict) - cresp = Container._response_to_entities(http_response) - return cresp - - def _paging_generator(self, first_page, query_dict, page_length): - """Used internally to create a generator of pages instead instead of a - container which contais all the results.""" - if len(first_page) == 0: - return # empty page - yield first_page - index = page_length - while self.results > index: - query_dict["P"] = f"{index}L{page_length}" - next_page = self._query_request(query_dict) - etag = next_page.query.etag - if etag is not None and etag != self.etag: - raise PagingConsistencyError("The database state changed while retrieving the pages") - yield next_page - index += page_length - - def execute(self, unique=False, raise_exception_on_error=True, cache=True, - page_length=None): - """Execute a query (via a server-requests) and return the results. - - Parameters - ---------- - - unique : bool - Whether the query is expected to have only one entity as result. - Defaults to False. 
- raise_exception_on_error : bool - Whether an exception should be raises when there are errors in the - resulting entities. Defaults to True. - cache : bool - Whether to use the server-side query cache (equivalent to adding a - "cache" flag) to the Query object. Defaults to True. - page_length : int - Whether to use paging. If page_length > 0 this method returns a - generator (to be used in a for-loop or with list-comprehension). - The generator yields containers with up to page_length entities. - Otherwise, paging is disabled, as well as for count queries and - when unique is True. Defaults to None. - - Raises: - ------- - PagingConsistencyError - If the database state changed between paged requests. - - Yields - ------ - page : Container - Returns a container with the next `page_length` resulting entities. - - Returns - ------- - results : Container or integer - Returns an integer when it was a `COUNT` query. Otherwise, returns a - Container with the resulting entities. - """ - flags = self.flags - - if cache is False: - flags["cache"] = "false" - query_dict = dict(flags) - query_dict["query"] = str(self.q) - - has_paging = False - is_count_query = self.q.split()[0].lower() == "count" if len(self.q.split()) > 0 else False - - if not unique and not is_count_query and page_length is not None and page_length > 0: - has_paging = True - query_dict["P"] = f"0L{page_length}" - - # retreive first/only page - cresp = self._query_request(query_dict) - - self.results = cresp.query.results - self.cached = cresp.query.cached - self.etag = cresp.query.etag - - if is_count_query: - return self.results - - if raise_exception_on_error: - raise_errors(cresp) - - if unique: - if len(cresp) > 1 and raise_exception_on_error: - raise QueryNotUniqueError( - "Query '{}' wasn't unique.".format(self.q)) - - if len(cresp) == 0 and raise_exception_on_error: - raise EmptyUniqueQueryError( - "Query '{}' found no results.".format(self.q)) - - if len(cresp) == 1: - r = cresp[0] - 
r.messages.extend(cresp.messages) - - return r - self.messages = cresp.messages - - if has_paging: - return self._paging_generator(cresp, query_dict, page_length) - else: - return cresp - - -def execute_query(q, unique=False, raise_exception_on_error=True, cache=True, - flags=None, page_length=None): - """Execute a query (via a server-requests) and return the results. - - Parameters - ---------- - - q : str - The query string. - unique : bool - Whether the query is expected to have only one entity as result. - Defaults to False. - raise_exception_on_error : bool - Whether an exception should be raised when there are errors in the - resulting entities. Defaults to True. - cache : bool - Whether to use the query server-side cache (equivalent to adding a - "cache" flag). Defaults to True. - flags : dict of str - Flags to be added to the request. - page_length : int - Whether to use paging. If page_length > 0 this method returns a - generator (to be used in a for-loop or with list-comprehension). - The generator yields containers with up to page_length entities. - Otherwise, paging is disabled, as well as for count queries and - when unique is True. Defaults to None. - - Raises: - ------- - PagingConsistencyError - If the database state changed between paged requests. - - Yields - ------ - page : Container - Returns a container with the next `page_length` resulting entities. - - Returns - ------- - results : Container or integer - Returns an integer when it was a `COUNT` query. Otherwise, returns a - Container with the resulting entities. 
- """ - query = Query(q) - - if flags is not None: - query.flags = flags - - return query.execute(unique=unique, - raise_exception_on_error=raise_exception_on_error, - cache=cache, page_length=page_length) - - -class DropOffBox(list): - def __init__(self, *args, **kwargs): - warn(DeprecationWarning( - "The DropOffBox is deprecated and will be removed in future.")) - super().__init__(*args, **kwargs) - - path = None - - def sync(self): - c = get_connection() - _log_request("GET: Info") - http_response = c.retrieve(["Info"]) - body = http_response.read() - _log_response(body) - - xml = etree.fromstring(body) - - for child in xml: - if child.tag.lower() == "stats": - infoelem = child - - break - - for child in infoelem: - if child.tag.lower() == "dropoffbox": - dropoffboxelem = child - - break - del self[:] - self.path = dropoffboxelem.get('path') - - for f in dropoffboxelem: - self.append(f.get('path')) - - return self - - -class UserInfo(): - - def __init__(self, xml): - self.roles = [role.text for role in xml.findall("Roles/Role")] - self.name = xml.get("username") - self.realm = xml.get("realm") - - -class Info(): - - def __init__(self): - self.messages = Messages() - self.sync() - - def sync(self): - c = get_connection() - try: - http_response = c.retrieve(["Info"]) - except CaosDBConnectionError as conn_e: - print(conn_e) - - return - - xml = etree.fromstring(http_response.read()) - - for e in xml: - m = _parse_single_xml_element(e) - - if isinstance(m, UserInfo): - self.user_info = m - elif isinstance(m, TimeZone): - self.time_zone = m - else: - self.messages.append(m) - - def __str__(self): - if "Counts" not in [m.type for m in self.messages]: - return "caosdb.Info" - - if int(self.messages["counts"]["records"]) > 0: - return "Connection to CaosDB with {} Records." .format( - self.messages["counts"]["records"] - ) - else: - return "Connection to CaosDB without Records." 
- - def __repr__(self): - return self.__str__() - - -class Permission(): - - def __init__(self, name, description=None): - self.name = name - self.description = description - - def __repr__(self): - return str(self) - - def __str__(self): - return self.name - - def __eq__(self, p): - if isinstance(p, Permission): - return p.name == self.name - - return False - - def __hash__(self): - return hash(self.name) - - -class Permissions(): - - known_permissions = None - - def __init__(self, xml): - self.parse_xml(xml) - - def clear(self): - self._perms = set() - - def parse_xml(self, xml): - self.clear() - - for e in xml: - if e.tag == "Permission": - self._perms.add(Permission(name=e.get("name"), - description=e.get("description"))) - - def __contains__(self, p): - if isinstance(p, Permission): - return p in self._perms - else: - return Permission(name=p) in self._perms - - def __repr__(self): - return str(self) - - def __str__(self): - return str(self._perms) - - -def parse_xml(xml): - """parse a string or tree representation of an xml document to a set of - entities (records, recordtypes, properties, or files). - - @param xml: a string or tree representation of an xml document. - @return: list of entities or single entity. 
- """ - - if isinstance(xml, etree._Element): - elem = xml - else: - elem = etree.fromstring(xml) - - return _parse_single_xml_element(elem) - - -def _parse_single_xml_element(elem): - classmap = { - 'record': Record, - 'recordtype': RecordType, - 'property': Property, - 'file': File, - 'parent': Parent, - 'entity': Entity} - - if elem.tag.lower() in classmap: - klass = classmap.get(elem.tag.lower()) - entity = klass() - Entity._from_xml(entity, elem) - - return entity - elif elem.tag.lower() == "version": - return Version.from_xml(elem) - elif elem.tag.lower() == "state": - return State.from_xml(elem) - elif elem.tag.lower() == "emptystring": - return "" - elif elem.tag.lower() == "value": - if len(elem) == 1 and elem[0].tag.lower() == "emptystring": - return "" - elif len(elem) == 1 and elem[0].tag.lower() in classmap: - return _parse_single_xml_element(elem[0]) - elif elem.text is None or elem.text.strip() == "": - return None - - return str(elem.text.strip()) - elif elem.tag.lower() == "querytemplate": - return QueryTemplate._from_xml(elem) - elif elem.tag.lower() == 'query': - return Query(elem) - elif elem.tag.lower() == 'history': - return Message(type='History', description=elem.get("transaction")) - elif elem.tag.lower() == 'stats': - counts = elem.find("counts") - - return Message(type="Counts", description=None, body=counts.attrib) - elif elem.tag == "EntityACL": - return ACL(xml=elem) - elif elem.tag == "Permissions": - return Permissions(xml=elem) - elif elem.tag == "UserInfo": - return UserInfo(xml=elem) - elif elem.tag == "TimeZone": - return TimeZone(zone_id=elem.get("id"), offset=elem.get("offset"), - display_name=elem.text.strip()) - else: - return Message(type=elem.tag, code=elem.get( - "code"), description=elem.get("description"), body=elem.text) - - -def _evaluate_and_add_error(parent_error, ent): - """Evaluate the error message(s) attached to entity and add a - corresponding exception to parent_error. 
- - Parameters: - ----------- - parent_error : TransactionError - Parent error to which the new exception will be attached. This - exception will be a direct child. - ent : Entity - Entity that caused the TransactionError. An exception is - created depending on its error message(s). - - Returns: - -------- - TransactionError : - Parent error with new exception(s) attached to it. - - """ - - if isinstance(ent, (Entity, QueryTemplate)): - # Check all error messages - found114 = False - found116 = False - - for err in ent.get_errors(): - # Evaluate specific EntityErrors depending on the error - # code - - if err.code is not None: - if int(err.code) == 101: # ent doesn't exist - new_exc = EntityDoesNotExistError(entity=ent, - error=err) - elif int(err.code) == 110: # ent has no data type - new_exc = EntityHasNoDatatypeError(entity=ent, - error=err) - elif int(err.code) == 403: # no permission - new_exc = AuthorizationError(entity=ent, - error=err) - elif int(err.code) == 152: # name wasn't unique - new_exc = UniqueNamesError(entity=ent, error=err) - elif int(err.code) == 114: # unqualified properties - found114 = True - new_exc = UnqualifiedPropertiesError(entity=ent, - error=err) - - for prop in ent.get_properties(): - new_exc = _evaluate_and_add_error(new_exc, - prop) - elif int(err.code) == 116: # unqualified parents - found116 = True - new_exc = UnqualifiedParentsError(entity=ent, - error=err) - - for par in ent.get_parents(): - new_exc = _evaluate_and_add_error(new_exc, - par) - else: # General EntityError for other codes - new_exc = EntityError(entity=ent, error=err) - else: # No error code causes a general EntityError, too - new_exc = EntityError(entity=ent, error=err) - parent_error.add_error(new_exc) - # Check for possible errors in parents and properties that - # weren't detected up to here - - if not found114: - dummy_err = EntityError(entity=ent) - - for prop in ent.get_properties(): - dummy_err = _evaluate_and_add_error(dummy_err, prop) - - if 
dummy_err.errors: - parent_error.add_error(dummy_err) - - if not found116: - dummy_err = EntityError(entity=ent) - - for par in ent.get_parents(): - dummy_err = _evaluate_and_add_error(dummy_err, par) - - if dummy_err.errors: - parent_error.add_error(dummy_err) - - elif isinstance(ent, Container): - parent_error.container = ent - - if ent.get_errors() is not None: - parent_error.code = ent.get_errors()[0].code - # In the highly unusual case of more than one error - # message, attach all of them. - parent_error.msg = '\n'.join( - [x.description for x in ent.get_errors()]) - # Go through all container elements and add them: - - for elt in ent: - parent_error = _evaluate_and_add_error(parent_error, elt) - - else: - raise TypeError("Parameter ent is to be an Entity or a Container") - - return parent_error - - -def raise_errors(arg0): - """Raise a TransactionError depending on the error code(s) inside - Entity, QueryTemplate or Container arg0. More detailed errors may - be attached to the TransactionError depending on the contents of - arg0. - - Parameters: - ----------- - arg0 : Entity, QueryTemplate, or Container - CaosDB object whose messages are evaluated according to their - error codes - - """ - transaction_error = _evaluate_and_add_error(TransactionError(), - arg0) - # Raise if any error was found - - if len(transaction_error.all_errors) > 0: - raise transaction_error - # Cover the special case of an empty container with error - # message(s) (e.g. query syntax error) - - if (transaction_error.container is not None and - transaction_error.container.has_errors()): - raise transaction_error - - -def delete(ids, raise_exception_on_error=True): - c = Container() - - if isinstance(ids, list) or isinstance(ids, range): - for i in ids: - c.append(Entity(id=i)) - else: - c.append(Entity(id=ids)) - - return c.delete(raise_exception_on_error=raise_exception_on_error) +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.common.models`. 
Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/common/state.py b/src/caosdb/common/state.py index cb74022bef57a77c8270b2033c904eecabaadf83..e69fc66345f36c355dcdbf7825583de0a1219c97 100644 --- a/src/caosdb/common/state.py +++ b/src/caosdb/common/state.py @@ -1,198 +1,6 @@ -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> -# Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -import copy -from lxml import etree +from linkahead.common.state import * +from warnings import warn - -def _translate_to_state_acis(acis): - result = set() - for aci in acis: - aci = copy.copy(aci) - if aci.role: - aci.role = "?STATE?" + aci.role + "?" - result.add(aci) - return result - - -class Transition: - """Transition - - Represents allowed transitions from one state to another. 
- - Properties - ---------- - name : str - The name of the transition - description: str - The description of the transition - from_state : str - A state name - to_state : str - A state name - """ - - def __init__(self, name, from_state, to_state, description=None): - self._name = name - self._from_state = from_state - self._to_state = to_state - self._description = description - - @property - def name(self): - return self._name - - @property - def description(self): - return self._description - - @property - def from_state(self): - return self._from_state - - @property - def to_state(self): - return self._to_state - - def __repr__(self): - return f'Transition(name="{self.name}", from_state="{self.from_state}", to_state="{self.to_state}", description="{self.description}")' - - def __eq__(self, other): - return (isinstance(other, Transition) - and other.name == self.name - and other.to_state == self.to_state - and other.from_state == self.from_state) - - def __hash__(self): - return 23472 + hash(self.name) + hash(self.from_state) + hash(self.to_state) - - @staticmethod - def from_xml(xml): - to_state = [to.get("name") for to in xml - if to.tag.lower() == "tostate"] - from_state = [from_.get("name") for from_ in xml - if from_.tag.lower() == "fromstate"] - result = Transition(name=xml.get("name"), - description=xml.get("description"), - from_state=from_state[0] if from_state else None, - to_state=to_state[0] if to_state else None) - return result - - -class State: - """State - - Represents the state of an entity and take care of the serialization and - deserialization of xml for the entity state. - - An entity state is always a State of a StateModel. 
- - Properties - ---------- - name : str - Name of the State - model : str - Name of the StateModel - description : str - Description of the State (read-only) - id : str - Id of the undelying State record (read-only) - transitions : set of Transition - All transitions which are available from this state (read-only) - """ - - def __init__(self, model, name): - self.name = name - self.model = model - self._id = None - self._description = None - self._transitions = None - - @property - def id(self): - return self._id - - @property - def description(self): - return self._description - - @property - def transitions(self): - return self._transitions - - def __eq__(self, other): - return (isinstance(other, State) - and self.name == other.name - and self.model == other.model) - - def __hash__(self): - return hash(self.name) + hash(self.model) - - def __repr__(self): - return f"State('{self.model}', '{self.name}')" - - def to_xml(self): - """Serialize this State to xml. - - Returns - ------- - xml : etree.Element - """ - xml = etree.Element("State") - if self.name is not None: - xml.set("name", self.name) - if self.model is not None: - xml.set("model", self.model) - return xml - - @staticmethod - def from_xml(xml): - """Create a new State instance from an xml Element. 
- - Parameters - ---------- - xml : etree.Element - - Returns - ------- - state : State - """ - name = xml.get("name") - model = xml.get("model") - result = State(name=name, model=model) - result._id = xml.get("id") - result._description = xml.get("description") - transitions = [Transition.from_xml(t) for t in xml if t.tag.lower() == - "transition"] - if transitions: - result._transitions = set(transitions) - - return result - - @staticmethod - def create_state_acl(acl): - from .models import ACL - state_acl = ACL() - state_acl._grants = _translate_to_state_acis(acl._grants) - state_acl._denials = _translate_to_state_acis(acl._denials) - state_acl._priority_grants = _translate_to_state_acis(acl._priority_grants) - state_acl._priority_denials = _translate_to_state_acis(acl._priority_denials) - return state_acl +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.common.state`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/common/timezone.py b/src/caosdb/common/timezone.py index 8fc5e710d3cbf6f20cf81397573f972db3b22f12..1b6ab31be17ef47ff0519bc53479a2958441ca46 100644 --- a/src/caosdb/common/timezone.py +++ b/src/caosdb/common/timezone.py @@ -1,19 +1,6 @@ -class TimeZone(): - """ - TimeZone, e.g. CEST, Europe/Berlin, UTC+4. +from linkahead.common.timezone import * +from warnings import warn - Attributes - ---------- - zone_id : string - ID of the time zone. - offset : int - Offset to UTC in seconds. - display_name : string - A human-friendly name of the time zone: - """ - - def __init__(self, zone_id, offset, display_name): - self.zone_id = zone_id - self.offset = offset - self.display_name = display_name +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.common.timezone`. 
Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/common/utils.py b/src/caosdb/common/utils.py index f0ce740d38d90b0c7bb1031e808b83efb2207a43..2fb496219409f8bdbea57f4b4f0f306e3ff14128 100644 --- a/src/caosdb/common/utils.py +++ b/src/caosdb/common/utils.py @@ -1,56 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""mising docstring.""" -from lxml import etree -from multiprocessing import Lock -from uuid import uuid4 -_uuid_lock = Lock() +from linkahead.common.utils import * +from warnings import warn - -def xml2str(xml): - return etree.tostring(xml, pretty_print=True, encoding='unicode') - - -def uuid(): - exc = None - ret = None - try: - _uuid_lock.acquire() - ret = uuid4() - except Exception as e: - exc = e - finally: - _uuid_lock.release() - if exc: - raise exc - return ret - - -def is_int(obj): - try: - int(obj) - return True - except ValueError: - return False +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.common.utils`. 
Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/common/versioning.py b/src/caosdb/common/versioning.py index 2875486a13347a2eb834d22580497033699ebd37..957e8d0b1485bb90bc89787dc1962d0487dd30c3 100644 --- a/src/caosdb/common/versioning.py +++ b/src/caosdb/common/versioning.py @@ -1,250 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> -# Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -""" Versioning module for anything related to entity versions. -Currently this module defines nothing but a single class, `Version`. -""" +from linkahead.common.versioning import * +from warnings import warn -from __future__ import absolute_import -from caosdb.common.utils import xml2str -from lxml import etree - - -class Version(): - """The version of an entity. - - An entity version has a version id (string), a date (UTC timestamp), a - list of predecessors and a list of successors. - - Parameters - ---------- - id : str, optional - See attribute `id`. Default: None - date : str, optional - See attribute `date`. Default: None - username : str, optional - See attribute `username`. 
Default: None - realm : str, optional - See attribute `realm`. Default: None - predecessors : list of Version, optional - See attribute `predecessors`. Default: empty list. - successors : list of Version, optional - See attribute `successors`. Default: empty list. - is_head : bool - See attribute `is_head`. Default: False - is_complete_history : bool - See attribute `is_complete_history`. Default: False - - Attributes - ---------- - id : str - Version ID (not the entity's id). - date : str - UTC Timestamp of the version, i.e. the date and time when the entity of - this version has been inserted or modified. - username : str - The username of the user who inserted or updated this version. - realm : str - The realm of the user who inserted or updated this version. - predecessors : list of Version - Predecessors are the older entity versions which have been modified - into this version. Usually, there is only one predecessor. However, - this API allows for entities to be merged into one entity, which would - result in more than one predecessor. - successors : list of Version - Successors are newer versions of this entity. If there are successors, - this version is not the latest version of this entity. Usually, there - is only one successor. However, this API allows that a single entity - may co-exist in several versions (e.g. several proposals for the next - entity status). That would result in more than one successor. - is_head : bool or string - If true, this indicates that this version is the HEAD if true. - Otherwise it is not known whether this is the head or not. Any string - matching "true" (case-insensitively) is regarded as True. - Nota bene: This property should typically be set if the server response - indicated that this is the head version. - is_complete_history : bool or string - If true, this indicates that this version contains the full version - history. 
That means, that the predecessors and successors have their - respective predecessors and successors attached as well and the tree is - completely available. Any string matching "true" (case-insensitively) - is regarded as True. - Nota bene: This property should typically be set if the server response - indicated that the full version history is included in its response. - """ - - # pylint: disable=redefined-builtin - def __init__(self, id=None, date=None, username=None, realm=None, - predecessors=None, successors=None, is_head=False, - is_complete_history=False): - """Typically the `predecessors` or `successors` should not "link back" to an existing Version -object.""" - self.id = id - self.date = date - self.username = username - self.realm = realm - self.predecessors = predecessors if predecessors is not None else [] - self.successors = successors if successors is not None else [] - self.is_head = str(is_head).lower() == "true" - self.is_complete_history = str(is_complete_history).lower() == "true" - - def get_history(self): - """ Returns a flat list of Version instances representing the history - of the entity. - - The list items are ordered by the relation between the versions, - starting with the oldest version. - - The items in the list have no predecessors or successors attached. - - Note: This method only returns reliable results if - `self.is_complete_history is True` and it will not retrieve the full - version history if it is not present. - - Returns - ------- - list of Version - """ - versions = [] - for p in self.predecessors: - # assuming that predecessors don't have any successors - versions = p.get_history() - versions.append(Version(id=self.id, date=self.date, - username=self.username, realm=self.realm)) - for s in self.successors: - # assuming that successors don't have any predecessors - versions.extend(s.get_history()) - return versions - - def to_xml(self, tag="Version"): - """Serialize this version to xml. 
- - The tag name is 'Version' per default. But since this method is called - recursively for the predecessors and successors as well, the tag name - can be configured. - - The resulting xml element contains attributes 'id' and 'date' and - 'Predecessor' and 'Successor' child elements. - - Parameters - ---------- - tag : str, optional - The name of the returned xml element. Defaults to 'Version'. - - Returns - ------- - xml : etree.Element - """ - xml = etree.Element(tag) - if self.id is not None: - xml.set("id", self.id) - if self.date is not None: - xml.set("date", self.date) - if self.username is not None: - xml.set("username", self.username) - if self.realm is not None: - xml.set("realm", self.realm) - if self.predecessors is not None: - for p in self.predecessors: - xml.append(p.to_xml(tag="Predecessor")) - if self.is_head is True: - xml.set("head", "true") - if self.successors is not None: - for s in self.successors: - xml.append(s.to_xml(tag="Successor")) - return xml - - def __str__(self): - """Return a stringified xml representation.""" - return self.__repr__() - - def __repr__(self): - """Return a stringified xml representation.""" - return xml2str(self.to_xml()) - - @staticmethod - def from_xml(xml): - """Parse a version object from a 'Version' xml element. - - Parameters - ---------- - xml : etree.Element - A 'Version' xml element, with 'id', possibly 'date', `username`, - `realm`, and `head` attributes as well as 'Predecessor' and - 'Successor' child elements. 
- - Returns - ------- - version : Version - a new version instance - """ - predecessors = [Version.from_xml(p) for p in xml if p.tag.lower() == "predecessor"] - successors = [Version.from_xml(s) for s in xml if s.tag.lower() == "successor"] - return Version(id=xml.get("id"), date=xml.get("date"), - is_head=xml.get("head"), - is_complete_history=xml.get("completeHistory"), - username=xml.get("username"), realm=xml.get("realm"), - predecessors=predecessors, successors=successors) - - def __hash__(self): - """Hash of the version instance. - - Also hashes the predecessors and successors. - """ - return (hash(self.id) - + hash(self.date) - + (Version._hash_list(self.predecessors) - if self.predecessors else 26335) - + (Version._hash_list(self.successors) - if self.successors else -23432)) - - @staticmethod - def _hash_list(_list): - """Hash a list by hashing each element and its index.""" - result = 12352 - for idx, val in enumerate(_list): - result += hash(val) + idx - return result - - @staticmethod - def _eq_list(this, that): - """List equality. - - List equality is defined as equality of each element, the order - and length. - """ - if len(this) != len(that): - return False - for v1, v2 in zip(this, that): - if v1 != v2: - return False - return True - - def __eq__(self, other): - """Equality of versions is defined by equality of id, date, and list - equality of the predecessors and successors.""" - return (self.id == other.id - and self.date == other.date - and Version._eq_list(self.predecessors, other.predecessors) - and Version._eq_list(self.successors, other.successors)) +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.common.versioning`. 
Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/configuration.py b/src/caosdb/configuration.py index 1c108ac1d39c135dbc90f477be8b8f2f630391ce..07a0e99c77bca207292d7fcb3add5d8a06303fdb 100644 --- a/src/caosdb/configuration.py +++ b/src/caosdb/configuration.py @@ -1,118 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -import os -import yaml -import warnings -try: - optional_jsonschema_validate = None - from jsonschema import validate as optional_jsonschema_validate -except ImportError: - pass +from linkahead.configuration import * +from warnings import warn -from configparser import ConfigParser - -from os import environ, getcwd -from os.path import expanduser, join, isfile - - -def _reset_config(): - global _pycaosdbconf - _pycaosdbconf = ConfigParser(allow_no_value=False) - - -def configure(inifile): - """read config from file. - - Return a list of files which have successfully been parsed. 
- """ - global _pycaosdbconf - if "_pycaosdbconf" not in globals(): - _pycaosdbconf = None - if _pycaosdbconf is None: - _reset_config() - read_config = _pycaosdbconf.read(inifile) - validate_yaml_schema(config_to_yaml(_pycaosdbconf)) - - if "HTTPS_PROXY" in environ: - _pycaosdbconf["Connection"]["https_proxy"] = environ["HTTPS_PROXY"] - if "HTTP_PROXY" in environ: - _pycaosdbconf["Connection"]["http_proxy"] = environ["HTTP_PROXY"] - return read_config - - -def get_config(): - global _pycaosdbconf - return _pycaosdbconf - - -def config_to_yaml(config): - valobj = {} - for s in config.sections(): - valobj[s] = {} - for key, value in config[s].items(): - # TODO: Can the type be inferred from the config object? - if key in ["timeout", "debug"]: - valobj[s][key] = int(value) - elif key in ["ssl_insecure"]: - valobj[s][key] = bool(value) - else: - valobj[s][key] = value - - return valobj - - -def validate_yaml_schema(valobj): - if optional_jsonschema_validate: - with open(os.path.join(os.path.dirname(__file__), "schema-pycaosdb-ini.yml")) as f: - schema = yaml.load(f, Loader=yaml.SafeLoader) - optional_jsonschema_validate(instance=valobj, schema=schema["schema-pycaosdb-ini"]) - else: - warnings.warn(""" - Warning: The validation could not be performed because `jsonschema` is not installed. - """) - - -def _read_config_files(): - """Function to read config files from different paths. - - Checks for path either in ``$PYCAOSDBINI`` or home directory (``.pycaosdb.ini``), and - additionally in the current working directory (``pycaosdb.ini``). - - Returns - ------- - - ini files: list - The successfully parsed ini-files. Order: env_var or home directory, cwd. Used for testing the function. 
- - """ - return_var = [] - if "PYCAOSDBINI" in environ: - return_var.extend(configure(expanduser(environ["PYCAOSDBINI"]))) - else: - return_var.extend(configure(expanduser('~/.pycaosdb.ini'))) - - if isfile(join(getcwd(), "pycaosdb.ini")): - return_var.extend(configure(join(getcwd(), "pycaosdb.ini"))) - return return_var +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.configuration`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/__init__.py b/src/caosdb/connection/__init__.py index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..bc9dc0cf93eb83588d2d7b13334ce6383b1c8a7d 100644 --- a/src/caosdb/connection/__init__.py +++ b/src/caosdb/connection/__init__.py @@ -0,0 +1,6 @@ + +from linkahead.connection import * +from warnings import warn + +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.connection`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/authentication/__init__.py b/src/caosdb/connection/authentication/__init__.py index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..cad5725dc1937940812a3ec8abea6a47763210f3 100644 --- a/src/caosdb/connection/authentication/__init__.py +++ b/src/caosdb/connection/authentication/__init__.py @@ -0,0 +1,6 @@ + +from linkahead.connection.authentication import * +from warnings import warn + +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.connection.authentication`. 
Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/authentication/auth_token.py b/src/caosdb/connection/authentication/auth_token.py index 688123867f68153d3631bb8559baa235f6f02da5..fb715ef218fd1fabacde2e060713d89bf24bf546 100644 --- a/src/caosdb/connection/authentication/auth_token.py +++ b/src/caosdb/connection/authentication/auth_token.py @@ -1,96 +1,6 @@ -#! -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> -# Copyright (C) 2020 Timm Fitschen <f.fitschen@indiscale.com> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""auth_token. -An Authentictor which only uses only a pre-supplied authentication token. -""" -from __future__ import absolute_import, unicode_literals, print_function -from .interface import AbstractAuthenticator, CaosDBServerConnection -from caosdb.connection.utils import auth_token_to_cookie -from caosdb.exceptions import LoginFailedError +from linkahead.connection.authentication.auth_token import * +from warnings import warn - -def get_authentication_provider(): - """get_authentication_provider. 
- - Return an authenticator which only uses a pre-supplied authentication - token. - - Returns - ------- - AuthTokenAuthenticator - """ - return AuthTokenAuthenticator() - - -class AuthTokenAuthenticator(AbstractAuthenticator): - """AuthTokenAuthenticator. - - Subclass of AbstractAuthenticator which provides authentication only via - a given authentication token. - - Methods - ------- - login - logout - configure - """ - - def __init__(self): - super(AuthTokenAuthenticator, self).__init__() - self.auth_token = None - self._connection = None - - def login(self): - self._login() - - def _login(self): - raise LoginFailedError("The authentication token is expired or you " - "have been logged out otherwise. The " - "auth_token authenticator cannot log in " - "again. You must provide a new " - "authentication token.") - - def logout(self): - self._logout() - - def _logout(self): - self.logger.debug("[LOGOUT]") - if self.auth_token is not None: - headers = {'Cookie': auth_token_to_cookie(self.auth_token)} - self._connection.request(method="DELETE", path="logout", - headers=headers) - self.auth_token = None - - def configure(self, **config): - if "auth_token" in config: - self.auth_token = config["auth_token"] - if "connection" in config: - self._connection = config["connection"] - if not isinstance(self._connection, CaosDBServerConnection): - raise Exception("""Bad configuration of the caosdb connection. - The `connection` must be an instance of - `CaosDBConnection`.""") +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.connection.authentication.auth_token`. 
Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/authentication/external_credentials_provider.py b/src/caosdb/connection/authentication/external_credentials_provider.py index 1cf4cefc90de2a0281528eb83c1efe55eda4f345..657c8e9d7016628e48ff0edae9c770fed880260f 100644 --- a/src/caosdb/connection/authentication/external_credentials_provider.py +++ b/src/caosdb/connection/authentication/external_credentials_provider.py @@ -1,92 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""external_credentials_provider.""" -from __future__ import absolute_import, unicode_literals -from abc import ABCMeta -import logging -from .plain import PlainTextCredentialsProvider -# meta class compatible with Python 2 *and* 3: -ABC = ABCMeta(str('ABC'), (object, ), {str('__slots__'): ()}) +from linkahead.connection.authentication.external_credentials_provider import * +from warnings import warn - -class ExternalCredentialsProvider(PlainTextCredentialsProvider, ABC): - """ExternalCredentialsProvider. 
- - Abstract subclass of PlainTextCredentialsProvider which should be used to - implement external credentials provider (e.g. pass, keyring, or any other call - to an external program, which presents the plain text password, which is to be - used for the authentication. - - Parameters - ---------- - callback: Function - A function which has **kwargs argument. This funktion will be called - each time a password is needed with the current connection - configuration as parameters. - """ - - def __init__(self, callback): - super(ExternalCredentialsProvider, self).__init__() - self._callback = callback - self._config = None - - def configure(self, **config): - """configure. - - Parameters - ---------- - **config - Keyword arguments containing the necessary arguments for the - concrete implementation of this class. - - Attributes - ---------- - password : str - The password. This password is not stored in this class. A callback - is called to provide the password each time this property is - called. - - Returns - ------- - None - """ - if "password" in config: - if "password_method" in config: - authm = "`{}`".format(config["password_method"]) - else: - authm = "an external credentials provider" - self.logger.log(logging.WARNING, - ("`password` defined. You configured caosdb to " - "use %s as authentication method and yet " - "provided a password yourself. This indicates " - "a misconfiguration (e.g. in your " - "pycaosdb.ini) and should be avoided."), - authm) - self._config = dict(config) - super(ExternalCredentialsProvider, self).configure(**config) - - @property - def password(self): - return self._callback(**self._config) +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.connection.authentication.external_credentials_provider`. 
Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/authentication/input.py b/src/caosdb/connection/authentication/input.py index 7edec7c814d9019d43701b33d4022063081714a7..d416cfbe86743382f1535ed02d4e149f6b5355ca 100644 --- a/src/caosdb/connection/authentication/input.py +++ b/src/caosdb/connection/authentication/input.py @@ -1,95 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""input. -A CredentialsProvider which reads the password from the input line. -""" -from __future__ import absolute_import, unicode_literals, print_function -from .interface import CredentialsProvider, CredentialsAuthenticator +from linkahead.connection.authentication.input import * +from warnings import warn -import getpass - - -def get_authentication_provider(): - """get_authentication_provider. - - Return an authenticator which uses the input for username/password credentials. - - Returns - ------- - CredentialsAuthenticator - with an InputCredentialsProvider as back-end. 
- """ - return CredentialsAuthenticator(InputCredentialsProvider()) - - -class InputCredentialsProvider(CredentialsProvider): - """InputCredentialsProvider. - - A class for obtaining the password directly from the user. - - Methods - ------- - configure - - Attributes - ---------- - password - username - """ - - def __init__(self): - super(InputCredentialsProvider, self).__init__() - self._password = None - self._username = None - - def configure(self, **config): - """configure. - - Parameters - ---------- - **config - Keyword arguments containing at least keywords "username" and "password". - - Returns - ------- - None - """ - if config.get("username"): - self._username = config["username"] - else: - self._username = input("Please enter the user name: ") - - url = config["url"] - self._password = getpass.getpass( - "Please enter the password for `{}` at `{}`: ".format( - self._username, url)) - - @property - def password(self): - return self._password - - @property - def username(self): - return self._username +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.connection.authentication.input`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/authentication/interface.py b/src/caosdb/connection/authentication/interface.py index f2cc5001cf8fa0f6d61ec65346f6a200ba0dfcd8..a92214632bb5e2dde3ee95413413446b340eeb70 100644 --- a/src/caosdb/connection/authentication/interface.py +++ b/src/caosdb/connection/authentication/interface.py @@ -1,271 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. 
-# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""This module provides the interfaces for authenticating requests to the -caosdb server. -Implementing modules muts provide a `get_authentication_provider()` method. -""" -from abc import ABCMeta, abstractmethod, abstractproperty -import logging -from caosdb.connection.utils import urlencode -from caosdb.connection.interface import CaosDBServerConnection -from caosdb.connection.utils import parse_auth_token, auth_token_to_cookie -from caosdb.exceptions import LoginFailedError +from linkahead.connection.authentication.interface import * +from warnings import warn -# meta class compatible with Python 2 *and* 3: -ABC = ABCMeta('ABC', (object, ), {'__slots__': ()}) - -_LOGGER = logging.getLogger(__name__) - - -class AbstractAuthenticator(ABC): - """AbstractAuthenticator. - - Interface for different authentication mechanisms. e.g. username/password - authentication or SSH key authentication. - - Attributes - ---------- - logger : Logger - A logger which should be used for all logging which has to do with - authentication. - auth_token : str - A string representation of a CaosDB Auth Token. 
- - Methods - ------- - login (abstract) - logout (abstract) - configure (abstract) - on_request - on_response - - """ - - def __init__(self): - self.auth_token = None - self.logger = _LOGGER - - @abstractmethod - def login(self): - """login. - - To be implemented by the child classes. - - Returns - ------- - None - """ - pass - - @abstractmethod - def logout(self): - """logout. - - To be implemented by the child classes. - - Returns - ------- - None - """ - pass - - @abstractmethod - def configure(self, **config): - """configure. - - Configure this authenticator. - - Parameters - ---------- - **config - Keyword arguments for the configuration. - - Returns - ------- - None - """ - pass - - def on_response(self, response): - """on_response. - - A call-back with is to be called by the connection after each - response. This method reads the latest auth cookie from the response. - - Parameters - ---------- - response : CaosDBHTTPResponse - The response of the server - - Returns - ------- - """ - self.auth_token = parse_auth_token( - response.getheader("Set-Cookie")) - - def on_request(self, method, path, headers, **kwargs): - # pylint: disable=unused-argument - """on_request. - - A call-back which is to be called by the connection before each - request. This method set the auth cookie for that request. - - Parameters - ---------- - method : str - The request method. - path : str - The request path. - headers : dict - A dictionary with headers which are to be set. - **kwargs - Ignored - - Returns - ------- - """ - if self.auth_token is None: - self.login() - if self.auth_token is not None: - headers['Cookie'] = auth_token_to_cookie(self.auth_token) - - -class CredentialsAuthenticator(AbstractAuthenticator): - """CredentialsAuthenticator. - - Subclass of AbstractAuthenticator which provides authentication via - credentials (username/password). This class always needs a - credentials_provider which provides valid credentials_provider before each - login. 
- - Parameters - ---------- - credentials_provider : CredentialsProvider - The source for the username and the password. - - Methods - ------- - login - logout - configure - """ - - def __init__(self, credentials_provider): - super(CredentialsAuthenticator, self).__init__() - self._credentials_provider = credentials_provider - self._connection = None - self.auth_token = None - - def login(self): - self._login() - - def logout(self): - self._logout() - - def _logout(self): - self.logger.debug("[LOGOUT]") - if self.auth_token is not None: - self._connection.request(method="DELETE", path="logout") - self.auth_token = None - - def _login(self): - username = self._credentials_provider.username - password = self._credentials_provider.password - self.logger.debug("[LOGIN] %s", username) - - # we need a username for this: - if username is None: - raise LoginFailedError("No username was given.") - if password is None: - raise LoginFailedError("No password was given") - - headers = {} - headers["Content-Type"] = "application/x-www-form-urlencoded" - body = urlencode({"username": username, "password": password}) - response = self._connection.request(method="POST", - path="login", - headers=headers, body=body) - - response.read() # clear socket - if response.status != 200: - raise LoginFailedError("LOGIN WAS NOT SUCCESSFUL") - self.on_response(response) - return response - - def configure(self, **config): - self._credentials_provider.configure(**config) - if "connection" in config: - self._connection = config["connection"] - if not isinstance(self._connection, CaosDBServerConnection): - raise Exception("""Bad configuration of the caosdb connection. - The `connection` must be an instance of - `CaosDBConnection`.""") - - -class CredentialsProvider(ABC): - """CredentialsProvider. - - An abstract class for username/password authentication. 
- - Attributes - ---------- - password (abstract) - username (abstract) - logger : Logger - A logger which should be used for all logging which has to do with the - provision of credentials. This is usually just the "authentication" - logger. - - Methods - ------- - configure (abstract) - """ - - def __init__(self): - self.logger = _LOGGER - - @abstractmethod - def configure(self, **config): - """configure. - - Configure the credentials provider with a dict. - - Parameters - ---------- - **config - Keyword arguments. The relevant arguments depend on the - implementing subclass of this class. - Returns - ------- - None - """ - - @abstractproperty - def password(self): - """password.""" - - @abstractproperty - def username(self): - """username.""" +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.connection.authentication.interface`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/authentication/keyring.py b/src/caosdb/connection/authentication/keyring.py index 99d184136c20b23557efea0b54c648095a8d3ab2..f66041b14de039cf3a02c964fd931a6f553cbc8b 100644 --- a/src/caosdb/connection/authentication/keyring.py +++ b/src/caosdb/connection/authentication/keyring.py @@ -1,98 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""keyring. -A CredentialsProvider which uses the external 'keyring' library to -retrieve the password. -""" +from linkahead.connection.authentication.keyring import * +from warnings import warn -import sys -import importlib -from getpass import getpass -from caosdb.exceptions import ConfigurationError -from .external_credentials_provider import ExternalCredentialsProvider -from .interface import CredentialsAuthenticator - - -def get_authentication_provider(): - """get_authentication_provider. - - Return an authenticator which uses plain text username/password credentials. - The difference to the `plain` module is that this implementation retrieves - the password from the external gnome keyring. - - Returns - ------- - CredentialsAuthenticator - with a 'KeyringCaller' as back-end. - """ - return CredentialsAuthenticator(KeyringCaller(callback=_call_keyring)) - - -def _get_external_keyring(): - try: - return importlib.import_module("keyring") - except ImportError: - raise RuntimeError( - "The keyring password method requires installation of the" - "keyring python package. On linux with python < 3.5, " - "this requires the installation of dbus-python as a " - "system package.") - - -def _call_keyring(**config): - if "username" not in config: - raise ConfigurationError("Your configuration did not provide a " - "`username` which is needed by the " - "`KeyringCaller` to retrieve the " - "password in question.") - url = config.get("url") - username = config.get("username") - app = "caosdb — {}".format(url) - external_keyring = _get_external_keyring() - password = external_keyring.get_password(app, username) - if password is None: - print("No password for user {} on {} found in keyring." 
- .format(username, app)) - password = getpass("Enter password to save " - "in system keyring/wallet: ") - external_keyring.set_password(app, username, password) - return password - - -class KeyringCaller(ExternalCredentialsProvider): - """KeyringCaller. - - A class for retrieving the password from the external 'gnome keyring' and - storing the username/password credentials as plain text strings. - - Methods - ------- - configure - - Attributes - ---------- - password - username - """ +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.connection.authentication.keyring`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/authentication/pass.py b/src/caosdb/connection/authentication/pass.py index 853cdf0ed92039e7b5fc9beda8bb76cc0f3cc030..cffa34b2fa827ab214b398d22231c845d81fe533 100644 --- a/src/caosdb/connection/authentication/pass.py +++ b/src/caosdb/connection/authentication/pass.py @@ -1,85 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""pass. 
-A CredentialsProvider which calls the external program 'pass' for the
-password.
-"""
+globals().update({_k: _v for _k, _v in vars(__import__("linkahead.connection.authentication.pass", fromlist=["*"])).items() if not _k.startswith("_")})
+from warnings import warn
-
-from subprocess import check_output, CalledProcessError
-from caosdb.exceptions import ConfigurationError
-from .interface import CredentialsAuthenticator
-from .external_credentials_provider import ExternalCredentialsProvider
-
-
-def get_authentication_provider():
-    """get_authentication_provider.
-
-    Return an authenticator which uses plain text username/password credentials.
-    The difference to the `plain` module is that this implementation retrieves
-    the password from the external program 'pass'.
-
-    Returns
-    -------
-    CredentialsAuthenticator
-        with a 'PassCaller' as back-end.
-    """
-    return CredentialsAuthenticator(PassCaller(callback=_call_pass))
-
-
-def _call_pass(**config):
-    if "password_identifier" not in config:
-        raise ConfigurationError("Your configuration did not provide a "
-                                 "`password_identifier` which is needed "
-                                 "by the `PassCaller` to retrieve the "
-                                 "password in question.")
-
-    try:
-        return check_output(
-            "pass " + config["password_identifier"],
-            shell=True).splitlines()[0].decode("UTF-8")
-    except CalledProcessError as exc:
-        raise RuntimeError(
-            "Password manager returned error code {}. This usually "
-            "occurs if the password_identifier in .pycaosdb.ini is "
-            "incorrect or missing.".format(exc.returncode))
-
-
-class PassCaller(ExternalCredentialsProvider):
-    """PassCaller.
-
-    A class for retrieving the password from the external program 'pass' and
-    storing the username/password credentials as plain text strings.
-
-    Methods
-    -------
-    configure
-
-    Attributes
-    ----------
-    password
-    username
-    """
-    # all the work is done in _call_pass and the super class
-    pass
+warn(("CaosDB was renamed to LinkAhead. Please import this library via `importlib.import_module('linkahead.connection.authentication.pass')`. 
Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/authentication/plain.py b/src/caosdb/connection/authentication/plain.py index 83dd592940a7010d07112f73b9bd5bcf3741a168..5e4a8b52a5ce0f80bb076085045ed4b19824ffc5 100644 --- a/src/caosdb/connection/authentication/plain.py +++ b/src/caosdb/connection/authentication/plain.py @@ -1,89 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""plain. -A CredentialsProvider which reads the password from the configuration -dict. -""" -from __future__ import absolute_import, unicode_literals, print_function -from .interface import CredentialsProvider, CredentialsAuthenticator +from linkahead.connection.authentication.plain import * +from warnings import warn - -def get_authentication_provider(): - """get_authentication_provider. - - Return an authenticator which uses plain text username/password credentials. - - Returns - ------- - CredentialsAuthenticator - with a PlainTextCredentialsProvider as back-end. 
- """ - return CredentialsAuthenticator(PlainTextCredentialsProvider()) - - -class PlainTextCredentialsProvider(CredentialsProvider): - """PlainTextCredentialsProvider. - - A class for storing username/password credentials as plain text strings. - - Methods - ------- - configure - - Attributes - ---------- - password - username - """ - - def __init__(self): - super(PlainTextCredentialsProvider, self).__init__() - self._password = None - self._username = None - - def configure(self, **config): - """configure. - - Parameters - ---------- - **config - Keyword arguments containing at least keywords "username" and "password". - - Returns - ------- - None - """ - if "password" in config: - self._password = config["password"] - if "username" in config: - self._username = config["username"] - - @property - def password(self): - return self._password - - @property - def username(self): - return self._username +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.connection.authentication.plain`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/authentication/unauthenticated.py b/src/caosdb/connection/authentication/unauthenticated.py index 65febae8fd8f02f3ee0d339fafb36af512fc7be7..b16090e51cc7cc55286ff4d5109f3aa42617b92a 100644 --- a/src/caosdb/connection/authentication/unauthenticated.py +++ b/src/caosdb/connection/authentication/unauthenticated.py @@ -1,119 +1,6 @@ -#! -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. 
-# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> -# Copyright (C) 2020 Timm Fitschen <f.fitschen@indiscale.com> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""unauthenticated. -An Authenticator which suppresses any authentication and also ignores auth_token -cookies. -""" -from __future__ import absolute_import, unicode_literals, print_function -from .interface import AbstractAuthenticator, CaosDBServerConnection -from caosdb.exceptions import LoginFailedError +from linkahead.connection.authentication.unauthenticated import * +from warnings import warn - -def get_authentication_provider(): - """get_authentication_provider. - - Return an authenticator which only uses a pre-supplied authentication - token. - - Returns - ------- - AuthTokenAuthenticator - """ - return Unauthenticated() - - -class Unauthenticated(AbstractAuthenticator): - """Unauthenticated. - - Subclass of AbstractAuthenticator which suppresses any authentication and - also ignores auth_token cookies. 
- - Methods - ------- - login - logout - configure - on_request - on_response - """ - - def __init__(self): - super(Unauthenticated, self).__init__() - self.auth_token = None - self._connection = None - - def login(self): - self._login() - - def _login(self): - raise LoginFailedError("This caosdb client is configured to stay " - "unauthenticated. Change your " - "`password_method` and provide an " - "`auth_token` or credentials if you want " - "to authenticate this client.") - - def logout(self): - self._logout() - - def _logout(self): - self.auth_token = None - - def configure(self, **config): - self.auth_token = None - - def on_request(self, method, path, headers, **kwargs): - # pylint: disable=unused-argument - """on_request. - - This implementation does not attempt to login or authenticate in any - form. - - Parameters - ---------- - method - unused - path - unused - headers - unused - **kwargs - unused - """ - pass - - def on_response(self, response): - # pylint: disable=unused-argument - """on_response. - - This implementation ignores any auth_token cookie sent by the server. - - Parameters - ---------- - response - unused - """ - pass +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.connection.authentication.unauthenticated`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/connection.py b/src/caosdb/connection/connection.py index 46dadea9dfcfa6e614493b75d709f604aa188ef6..956f2fa3421831d9a0304d958787e8eb08ab47fb 100644 --- a/src/caosdb/connection/connection.py +++ b/src/caosdb/connection/connection.py @@ -1,712 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. 
-# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# Copyright (c) 2019 Daniel Hornung -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""Connection to a CaosDB server.""" -from __future__ import absolute_import, print_function, unicode_literals -import logging -import ssl -import sys -import warnings -from builtins import str # pylint: disable=redefined-builtin -from errno import EPIPE as BrokenPipe -from socket import error as SocketError -from urllib.parse import quote, urlparse -from requests import Session as HTTPSession -from requests.exceptions import ConnectionError as HTTPConnectionError -from urllib3.poolmanager import PoolManager -from requests.adapters import HTTPAdapter +from linkahead.connection.connection import * +from warnings import warn -from caosdb.configuration import get_config -from caosdb.exceptions import (CaosDBException, HTTPClientError, - ConfigurationError, - CaosDBConnectionError, - HTTPForbiddenError, - LoginFailedError, - HTTPResourceNotFoundError, - HTTPServerError, - HTTPURITooLongError) -try: - from caosdb.version import version -except ModuleNotFoundError: - version = "uninstalled" - -from pkg_resources import resource_filename - -from .interface import CaosDBHTTPResponse, CaosDBServerConnection -from .utils import 
make_uri_path, parse_url, urlencode -from .encode import MultipartYielder, ReadableMultiparts - -_LOGGER = logging.getLogger(__name__) - - -class _WrappedHTTPResponse(CaosDBHTTPResponse): - - def __init__(self, response): - self.response = response - self._generator = None - self._buffer = b'' - self._stream_consumed = False - - @property - def reason(self): - return self.response.reason - - @property - def status(self): - return self.response.status_code - - def read(self, size=None): - if self._stream_consumed is True: - raise RuntimeError("Stream is consumed") - - if self._buffer is None: - # the buffer has been drained in the previous call. - self._stream_consumed = True - return b'' - - if self._generator is None and (size is None or size == 0): - # return full content at once - self._stream_consumed = True - return self.response.content - - if len(self._buffer) >= size: - # still enough bytes in the buffer - result = chunk[:size] - self._buffer = chunk[size:] - return result - - if self._generator is None: - # first call to this method - if size is None or size == 0: - size = 512 - self._generator = self.response.iter_content(size) - - try: - # read new data into the buffer - chunk = self._buffer + next(self._generator) - result = chunk[:size] - if len(result) == 0: - self._stream_consumed = True - self._buffer = chunk[size:] - return result - except StopIteration: - # drain buffer - result = self._buffer - self._buffer = None - return result - - def getheader(self, name, default=None): - return self.response.headers[name] if name in self.response.headers else default - - def getheaders(self): - return self.response.headers.items() - - def close(self): - self.response.close() - - -class _SSLAdapter(HTTPAdapter): - """Transport adapter that allows us to use different SSL versions.""" - - def __init__(self, ssl_version): - self.ssl_version = ssl_version - super().__init__() - - def init_poolmanager(self, connections, maxsize, block=False): - self.poolmanager = 
PoolManager( - num_pools=connections, maxsize=maxsize, - block=block, ssl_version=self.ssl_version) - - -class _DefaultCaosDBServerConnection(CaosDBServerConnection): - """_DefaultCaosDBServerConnection. - - Methods - ------- - configure - request - """ - - def __init__(self): - self._useragent = ("caosdb-pylib/{version} - {implementation}".format( - version=version, implementation=type(self).__name__)) - self._base_path = None - self._session = None - self._timeout = None - - def request(self, method, path, headers=None, body=None): - """request. - - Send a HTTP request to the server. - - Parameters - ---------- - method : str - The HTTP request method. - path : str - An URI path segment (without the 'scheme://host:port/' parts), - including query and frament segments. - headers : dict of str -> str, optional - HTTP request headers. (Defautl: None) - body : str or bytes or readable, optional - The body of the HTTP request. Bytes should be a utf-8 encoded - string. - - Returns - ------- - response : CaosDBHTTPResponse - """ - - if headers is None: - headers = {} - headers["User-Agent"] = self._useragent - - if path.endswith("/."): - path = path[:-1] + "%2E" - - if isinstance(body, MultipartYielder): - body = ReadableMultiparts(body) - - try: - response = self._session.request( - method=method, - url=self._base_path + path, - headers=headers, - data=body, - timeout=self._timeout, - stream=True) - - return _WrappedHTTPResponse(response) - except HTTPConnectionError as conn_err: - raise CaosDBConnectionError( - "Connection failed. Network or server down? " + str(conn_err) - ) - - def configure(self, **config): - """configure. - - Configure the http connection. - - Parameters - ---------- - cacert : str - Path to the CA certificate which will be used to identify the - server. - url : str - The url of the CaosDB Server, e.g. - `https://example.com:443/rootpath`, including a possible root path. - **config : - Any further keyword arguments are being ignored. 
- - Raises - ------ - CaosDBConnectionError - If no url has been specified, or if the CA certificate cannot be - loaded. - """ - - if "url" not in config: - raise CaosDBConnectionError( - "No connection url specified. Please " - "do so via caosdb.configure_connection(...) or in a config " - "file.") - if (not config["url"].lower().startswith("https://") and not config["url"].lower().startswith("http://")): - raise CaosDBConnectionError("The connection url is expected " - "to be a http or https url and " - "must include the url scheme " - "(i.e. start with https:// or " - "http://).") - - url = urlparse(config["url"]) - path = url.path.strip("/") - if len(path) > 0: - path = path + "/" - self._base_path = url.scheme + "://" + url.netloc + "/" + path - - self._session = HTTPSession() - - if url.scheme == "https": - self._setup_ssl(config) - - # TODO(tf) remove in next release - socket_proxy = config["socket_proxy"] if "socket_proxy" in config else None - if socket_proxy is not None: - self._session.proxies = { - "https": "socks5://" + socket_proxy, - "http": "socks5://" + socket_proxy, - } - - if "https_proxy" in config: - if self._session.proxies is None: - self._session.proxies = {} - self._session.proxies["https"] = config["https_proxy"] - - if "http_proxy" in config: - if self._session.proxies is None: - self._session.proxies = {} - self._session.proxies["http"] = config["http_proxy"] - - if "timeout" in config: - self._timeout = config["timeout"] - - def _setup_ssl(self, config): - if "ssl_version" in config and config["cacert"] is not None: - ssl_version = getattr(ssl, config["ssl_version"]) - else: - ssl_version = ssl.PROTOCOL_TLS - - self._session.mount(self._base_path, _SSLAdapter(ssl_version)) - - verify = True - if "cacert" in config: - verify = config["cacert"] - if "ssl_insecure" in config and config["ssl_insecure"]: - _LOGGER.warning("*** Warning! ***\n" - "Insecure SSL mode, certificate will not be checked! 
" - "Please consider removing the `ssl_insecure` configuration option.\n" - "****************") - warnings.filterwarnings(action="ignore", module="urllib3", - message="Unverified HTTPS request is being made") - verify = False - if verify is not None: - self._session.verify = verify - - -def _make_conf(*conf): - """_make_conf. - - Merge several config dicts into one. The precedence goes to latter dicts in - the function call. - - Parameters - ---------- - *conf : dict - One ore more dicts with lower case option names (i.e. keys). - - Returns - ------- - dict - A merged config dict. - """ - result = {} - - for conf_dict in conf: - result.update(conf_dict) - - return result - - -_DEFAULT_CONF = { - "password_method": "input", - "implementation": _DefaultCaosDBServerConnection, - "timeout": 210, -} - - -def _get_authenticator(**config): - """_get_authenticator. - - Import and configure the password_method. - - Parameters - ---------- - password_method : str - The simple name of a submodule of caosdb.connection.authentication. - Currently, there are four valid values for this parameter: 'plain', - 'pass', 'keyring' and 'auth_token'. - **config : - Any other keyword arguments are passed the configre method of the - password_method. - - Returns - ------- - AbstractAuthenticator - An object which implements the password_method and which already - configured. - - Raises - ------ - ConfigurationError - If the password_method string cannot be resolved to a CaosAuthenticator - class. - """ - auth_module = ("caosdb.connection.authentication." + - config["password_method"]) - _LOGGER.debug("import auth_module %s", auth_module) - try: - __import__(auth_module) - - auth_provider = sys.modules[auth_module].get_authentication_provider() - auth_provider.configure(**config) - - return auth_provider - - except ImportError: - raise ConfigurationError("Password method \"{}\" not implemented. " - "Try `plain`, `pass`, `keyring`, or " - "`auth_token`." 
- .format(config["password_method"])) - - -def configure_connection(**kwargs): - """Configures the caosdb connection and returns the Connection object. - - The effective configuration is governed by the default values (see - 'Parameters'), the global configuration (see `caosdb.get_config()`) and the - parameters which are passed to this function, with ascending priority. - - The parameters which are listed here, are possibly not sufficient for a - working configuration of the connection. Check the `configure` method of - the implementation class and the password_method for more details. - - Parameters - ---------- - url : str - The url of the CaosDB Server. HTTP and HTTPS urls are allowed. However, - it is **highly** recommend to avoid HTTP because passwords and - authentication token are send over the network in plain text. - - username : str - Username for login; e.g. 'admin'. - - password : str - Password for login if 'plain' is used as password_method. - - password_method : str - The name of a submodule of caosdb.connection.authentication which - implements the AbstractAuthenticator interface. (Default: 'plain') - Possible values are, for example: - - "plain" Need username and password arguments. - - "input" Asks for the password. - - "pass" Uses the `pass` password manager. - - "keyring" Uses the `keyring` library. - - "auth_token" Uses only a given auth_token. - - timeout : int - A connection timeout in seconds. (Default: 210) - - ssl_insecure : bool - Whether SSL certificate warnings should be ignored. Only use this for - development purposes! (Default: False) - - auth_token : str (optional) - An authentication token which has been issued by the CaosDB Server. - Implies `password_method="auth_token"` if set. 
An example token string would be `["O","OneTimeAuthenticationToken","anonymous",["administration"],[],1592995200000,604800000,"3ZZ4WKRB-5I7DG2Q6-ZZE6T64P-VQ","197d0d081615c52dc18fb323c300d7be077beaad4020773bb58920b55023fa6ee49355e35754a4277b9ac525c882bcd3a22e7227ba36dfcbbdbf8f15f19d1ee9",1,30000]`. - - https_proxy : str, optional - Define a proxy for the https connections, e.g. `http://localhost:8888`, - `socks5://localhost:8888`, or `socks4://localhost:8888`. These are - either (non-TLS) HTTP proxies, SOCKS4 proxies, or SOCKS5 proxies. HTTPS - proxies are not supported. However, the connection will be secured - using TLS in the tunneled connection nonetheless. Only the connection - to the proxy is insecure which is why it is not recommended to use HTTP - proxies when authentication against the proxy is necessary. If - unspecified, the https_proxy option of the pycaosdb.ini or the HTTPS_PROXY - environment variable are being used. Use `None` to override these - options with a no-proxy setting. - - http_proxy : str, optional - Define a proxy for the http connections, e.g. `http://localhost:8888`. - If unspecified, the http_proxy option of the pycaosdb.ini or the - HTTP_PROXY environment variable are being used. Use `None` to override - these options with a no-proxy setting. - - implementation : CaosDBServerConnection - The class which implements the connection. (Default: - _DefaultCaosDBServerConnection) - - Returns - ------- - _Connection - The singleton instance of the _Connection class. 
- """ - global_conf = {} - conf = get_config() - # Convert config to dict, with preserving types - int_opts = ["timeout"] - bool_opts = ["ssl_insecure"] - - if conf.has_section("Connection"): - global_conf = dict(conf.items("Connection")) - # Integer options - - for opt in int_opts: - if opt in global_conf: - global_conf[opt] = conf.getint("Connection", opt) - # Boolean options - - for opt in bool_opts: - if opt in global_conf: - global_conf[opt] = conf.getboolean("Connection", opt) - local_conf = _make_conf(_DEFAULT_CONF, global_conf, kwargs) - - connection = _Connection.get_instance() - - if "socket_proxy" in local_conf: - warnings.warn("Deprecated configuration option: socket_proxy. Use " - "the new https_proxy option instead", - DeprecationWarning, stacklevel=1) - connection.configure(**local_conf) - - return connection - - -def get_connection(): - """Return the connection. - - If the connection was not configured yet `configure_connection` will - be called inside this function without arguments. - """ - connection = _Connection.get_instance() - - if connection.is_configured: - return connection - - return configure_connection() - - -def _handle_response_status(http_response): - - status = http_response.status - - if status == 200: - return - - # emtpy response buffer - body = http_response.read() - - if status == 404: - raise HTTPResourceNotFoundError("This resource has not been found.") - elif status > 499: - raise HTTPServerError(body=body) - - reason = http_response.reason - standard_message = ("Request failed. 
The response returned with status " - "{} - {}.".format(status, reason)) - if status == 401: - raise LoginFailedError(standard_message) - elif status == 403: - raise HTTPForbiddenError(standard_message) - elif status in (413, 414): - raise HTTPURITooLongError(standard_message) - elif 399 < status < 500: - raise HTTPClientError(msg=standard_message, status=status, body=body) - else: - raise CaosDBException(standard_message) - - -class _Connection(object): # pylint: disable=useless-object-inheritance - """This connection class provides the interface to the database connection - allowing for retrieval, insertion, update, etc. of entities, files, users, - roles and much more. - - It wrapps an instance of CaosDBServerConnection which actually does the - work (how, depends on the instance). - - It is a singleton and should not be instanciated or modified by any client. - Use the methods `get_connection` and `configure_connection` for this - purpose. - """ - - __instance = None - - def __init__(self): - self._delegate_connection = None - self._authenticator = None - self.is_configured = False - - @classmethod - def get_instance(cls): - if cls.__instance is None: - cls.__instance = _Connection() - - return cls.__instance - - def configure(self, **config): - self.is_configured = True - - if "implementation" not in config: - raise ConfigurationError( - "Missing CaosDBServerConnection implementation. You did not " - "specify an `implementation` for the connection.") - try: - self._delegate_connection = config["implementation"]() - - if not isinstance(self._delegate_connection, - CaosDBServerConnection): - raise TypeError("The `implementation` callable did not return " - "an instance of CaosDBServerConnection.") - except TypeError as type_err: - raise ConfigurationError( - "Bad CaosDBServerConnection implementation. The " - "implementation must be a callable object which returns an " - "instance of `CaosDBServerConnection` (e.g. 
a constructor " - "or a factory).\n{}".format(type_err.args[0])) - self._delegate_connection.configure(**config) - - if "auth_token" in config: - # deprecated, needed for older scripts - config["password_method"] = "auth_token" - if "password_method" not in config: - raise ConfigurationError("Missing password_method. You did " - "not specify a `password_method` for" - "the connection.") - self._authenticator = _get_authenticator( - connection=self._delegate_connection, **config) - - return self - - def retrieve(self, entity_uri_segments=None, query_dict=None, **kwargs): - path = make_uri_path(entity_uri_segments, query_dict) - - http_response = self._http_request(method="GET", path=path, **kwargs) - - return http_response - - def delete(self, entity_uri_segments=None, query_dict=None, **kwargs): - path = make_uri_path(entity_uri_segments, query_dict) - - http_response = self._http_request( - method="DELETE", path=path, **kwargs) - - return http_response - - def update(self, entity_uri_segment, query_dict=None, **kwargs): - path = make_uri_path(entity_uri_segment, query_dict) - - http_response = self._http_request(method="PUT", path=path, **kwargs) - - return http_response - - def activate_user(self, link): - self._authenticator.logout() - fullurl = urlparse(link) - path = fullurl.path - query = fullurl.query - http_response = self._http_request( - method="GET", path=path + "?" 
+ query) - - return http_response - - def put_form_data(self, entity_uri_segment, params): - return self._form_data_request( - method="PUT", path=entity_uri_segment, params=params) - - def post_form_data(self, entity_uri_segment, params): - return self._form_data_request( - method="POST", - path=entity_uri_segment, - params=params) - - def _form_data_request(self, method, path, params): - body = urlencode(params) - headers = {} - headers["Content-Type"] = "application/x-www-form-urlencoded" - response = self._http_request( - method=method, - path=quote(path), - body=body, - headers=headers) - - return response - - def insert(self, entity_uri_segment, query_dict=None, body=None, **kwargs): - path = make_uri_path(entity_uri_segment, query_dict) - - http_response = self._http_request( - method="POST", path=path, body=body, **kwargs) - - return http_response - - def download_file(self, path): - """This function downloads a file via HTTP from the Caosdb file - system.""" - try: - uri_segments = ["FileSystem"] - uri_segments.extend(path.split("/")) - - return self.retrieve(entity_uri_segments=uri_segments) - except HTTPResourceNotFoundError: - raise HTTPResourceNotFoundError("This file does not exist.") - - def _login(self): - self._authenticator.login() - - def _logout(self): - self._authenticator.logout() - - def _http_request(self, method, path, headers=None, body=None, **kwargs): - try: - return self._retry_http_request(method=method, path=path, - headers=headers, body=body, - **kwargs) - except SocketError as e: - if e.errno != BrokenPipe: - raise - - return self._retry_http_request(method=method, path=path, - headers=headers, body=body, - reconnect=False, - **kwargs) - except LoginFailedError: - if kwargs.get("reconnect", True) is True: - self._login() - - return self._retry_http_request(method=method, path=path, - headers=headers, body=body, - reconnect=False, - **kwargs) - raise - - def _retry_http_request(self, method, path, headers, body, **kwargs): - - if 
hasattr(body, "encode"): - # python3 - body = body.encode("utf-8") - - if headers is None: - headers = {} - self._authenticator.on_request(method=method, path=path, - headers=headers) - _LOGGER.debug("request: %s %s %s", method, path, str(headers)) - http_response = self._delegate_connection.request( - method=method, - path=path, - headers=headers, - body=body) - _LOGGER.debug("response: %s %s", str(http_response.status), - str(http_response.getheaders())) - self._authenticator.on_response(http_response) - _handle_response_status(http_response) - - return http_response - - def get_username(self): - """ - Return the username of the current connection. - - Shortcut for: get_connection()._authenticator._credentials_provider.username - """ - return self._authenticator._credentials_provider.username +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.connection.connection`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/encode.py b/src/caosdb/connection/encode.py index 0b826cc4400275a2374308ee104cdbdabb619b75..714df86244863ebcccf1e173c1a7e5ef90ec3952 100644 --- a/src/caosdb/connection/encode.py +++ b/src/caosdb/connection/encode.py @@ -1,515 +1,6 @@ -# -*- encoding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -# Original work Copyright (c) 2011 Chris AtLee -# Modified work Copyright (c) 2017 Biomedical Physics, MPI for Dynamics and Self-Organization -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. -"""multipart/form-data encoding module. -This module provides functions that faciliate encoding name/value pairs -as multipart/form-data suitable for a HTTP POST or PUT request. 
+from linkahead.connection.encode import * +from warnings import warn -multipart/form-data is the standard way to upload files over HTTP -""" - -__all__ = [ - 'gen_boundary', 'encode_and_quote', 'MultipartParam', 'encode_string', - 'encode_file_header', 'get_body_size', 'get_headers', 'multipart_encode', - 'ReadableMultiparts', -] -from urllib.parse import quote_plus -from io import UnsupportedOperation -import uuid -import re -import os -import mimetypes -from email.header import Header - - -def gen_boundary(): - """Returns a random string to use as the boundary for a message.""" - return uuid.uuid4().hex - - -def encode_and_quote(data): - """If ``data`` is unicode, return urllib.quote_plus(data.encode("utf-8")) - otherwise return urllib.quote_plus(data)""" - if data is None: - return None - - return quote_plus(data) - - -class MultipartParam(object): - """Represents a single parameter in a multipart/form-data request. - - ``name`` is the name of this parameter. - - If ``value`` is set, it must be a string or unicode object to use as the - data for this parameter. - - If ``filename`` is set, it is what to say that this parameter's filename - is. Note that this does not have to be the actual filename any local file. - - If ``filetype`` is set, it is used as the Content-Type for this parameter. - If unset it defaults to "text/plain; charset=utf8" - - If ``filesize`` is set, it specifies the length of the file ``fileobj`` - - If ``fileobj`` is set, it must be a file-like object that supports - .read(). - - Both ``value`` and ``fileobj`` must not be set, doing so will - raise a ValueError assertion. - - If ``fileobj`` is set, and ``filesize`` is not specified, then - the file's size will be determined first by stat'ing ``fileobj``'s - file descriptor, and if that fails, by seeking to the end of the file, - recording the current position as the size, and then by seeking back to the - beginning of the file. 
- - ``callback`` is a callable which will be called from iter_encode with (self, - current_transferred, total), representing the current parameter, current amount - transferred, and the total size. - """ - - def __init__(self, - name, - value=None, - filename=None, - filetype=None, - filesize=None, - fileobj=None, - callback=None): - self.name = Header(name).encode() - self.value = value - if filename is None: - self.filename = None - else: - bfilename = filename.encode("ascii", "xmlcharrefreplace") - self.filename = bfilename.decode("UTF-8").replace('"', '\\"') - - self.filetype = filetype - - self.filesize = filesize - self.fileobj = fileobj - self.callback = callback - - if self.value is not None and self.fileobj is not None: - raise ValueError("Only one of value or fileobj may be specified") - - if fileobj is not None and filesize is None: - # Try and determine the file size - try: - self.filesize = os.fstat(fileobj.fileno()).st_size - except (OSError, AttributeError, UnsupportedOperation): - try: - fileobj.seek(0, 2) - self.filesize = fileobj.tell() - fileobj.seek(0) - except BaseException: - raise ValueError("Could not determine filesize") - - def __cmp__(self, other): - attrs = [ - 'name', 'value', 'filename', 'filetype', 'filesize', 'fileobj' - ] - myattrs = [getattr(self, a) for a in attrs] - oattrs = [getattr(other, a) for a in attrs] - return cmp(myattrs, oattrs) - - def reset(self): - """Reset the file object's read pointer.""" - if self.fileobj is not None: - self.fileobj.seek(0) - elif self.value is None: - raise ValueError("Don't know how to reset this parameter") - - @classmethod - def from_file(cls, paramname, filename): - """Returns a new MultipartParam object constructed from the local file - at ``filename``. 
- - ``filesize`` is determined by os.path.getsize(``filename``) - - ``filetype`` is determined by mimetypes.guess_type(``filename``)[0] - - ``filename`` is set to os.path.basename(``filename``) - """ - - return cls( - paramname, - filename=os.path.basename(filename), - filetype=mimetypes.guess_type(filename)[0], - filesize=os.path.getsize(filename), - fileobj=open(filename, "rb")) - - @classmethod - def from_params(cls, params): - """Returns a list of MultipartParam objects from a sequence of name, - value pairs, MultipartParam instances, or from a mapping of names to - values. - - The values may be strings or file objects, or MultipartParam - objects. MultipartParam object names must match the given names - in the name,value pairs or mapping, if applicable. - """ - if hasattr(params, 'items'): - params = params.items() - - retval = [] - for item in params: - if isinstance(item, cls): - retval.append(item) - continue - name, value = item - if isinstance(value, cls): - assert value.name == name - retval.append(value) - continue - if hasattr(value, 'read'): - # Looks like a file object - filename = getattr(value, 'name', None) - if filename is not None: - filetype = mimetypes.guess_type(filename)[0] - else: - filetype = None - - retval.append( - cls(name=name, - filename=filename, - filetype=filetype, - fileobj=value)) - else: - retval.append(cls(name, value)) - return retval - - def encode_hdr(self, boundary): - """Returns the header of the encoding of this parameter.""" - boundary = encode_and_quote(boundary) - - headers = ["--%s" % boundary] - - if self.filename: - disposition = 'form-data; name="%s"; filename="%s"' % ( - self.name, self.filename) - else: - disposition = 'form-data; name="%s"' % self.name - - headers.append("Content-Disposition: %s" % disposition) - - if self.filetype: - filetype = self.filetype - else: - filetype = "text/plain; charset=utf-8" - - headers.append("Content-Type: %s" % filetype) - - headers.append("") - headers.append("") - - return 
"\r\n".join(headers) - - def encode(self, boundary): - """Returns the string encoding of this parameter.""" - if self.value is None: - value = self.fileobj.read() - else: - value = self.value - - if re.search("^--%s$" % re.escape(boundary), value, re.M): - raise ValueError("boundary found in encoded string") - - return "%s%s\r\n" % (self.encode_hdr(boundary), value) - - def iter_encode(self, boundary, blocksize=4096): - """Yields the encoding of this parameter If self.fileobj is set, then - blocks of ``blocksize`` bytes are read and yielded.""" - total = self.get_size(boundary) - current_transferred = 0 - if self.value is not None: - block = self.encode(boundary) - current_transferred += len(block) - yield block - if self.callback: - self.callback(self, current_transferred, total) - else: - block = self.encode_hdr(boundary) - current_transferred += len(block) - yield block - if self.callback: - self.callback(self, current_transferred, total) - last_block = b"" - encoded_boundary = "--%s" % encode_and_quote(boundary) - boundary_exp = re.compile("^%s$" % re.escape(encoded_boundary), - re.M) - while True: - block = self.fileobj.read(blocksize) - if not block: - current_transferred += 2 - yield "\r\n" - if self.callback: - self.callback(self, current_transferred, total) - break - last_block += block - if boundary_exp.search(last_block.decode("ascii", "ignore")): - raise ValueError("boundary found in file data") - last_block = last_block[-len(encoded_boundary) - 2:] - current_transferred += len(block) - yield block - if self.callback: - self.callback(self, current_transferred, total) - - def get_size(self, boundary): - """Returns the size in bytes that this param will be when encoded with - the given boundary.""" - if self.filesize is not None: - valuesize = self.filesize - else: - valuesize = len(self.value) - - return len(self.encode_hdr(boundary)) + 2 + valuesize - - -def encode_string(boundary, name, value): - """Returns ``name`` and ``value`` encoded as a 
multipart/form-data - variable. - - ``boundary`` is the boundary string used throughout a single request - to separate variables. - """ - - return MultipartParam(name, value).encode(boundary) - - -def encode_file_header(boundary, - paramname, - filesize, - filename=None, - filetype=None): - """Returns the leading data for a multipart/form-data field that contains - file data. - - ``boundary`` is the boundary string used throughout a single request to - separate variables. - - ``paramname`` is the name of the variable in this request. - - ``filesize`` is the size of the file data. - - ``filename`` if specified is the filename to give to this field. This - field is only useful to the server for determining the original filename. - - ``filetype`` if specified is the MIME type of this file. - - The actual file data should be sent after this header has been sent. - """ - - return MultipartParam( - paramname, filesize=filesize, filename=filename, - filetype=filetype).encode_hdr(boundary) - - -def get_body_size(params, boundary): - """Returns the number of bytes that the multipart/form-data encoding of - ``params`` will be.""" - size = sum( - p.get_size(boundary) for p in MultipartParam.from_params(params)) - return size + len(boundary) + 6 - - -def get_headers(params, boundary): - """Returns a dictionary with Content-Type and Content-Length headers for - the multipart/form-data encoding of ``params``.""" - headers = {} - boundary = quote_plus(boundary) - headers['Content-Type'] = "multipart/form-data; boundary=%s" % boundary - headers['Content-Length'] = str(get_body_size(params, boundary)) - return headers - - -class MultipartYielder(object): - """An iterator that yields the parameters of a multipart/formdata http - body.""" - - def __init__(self, params, boundary, callback): - self.params = params - self.boundary = boundary - self.callback = callback - - self.i = 0 - self.current_part = None - self.param_iter = None - self.current_transferred = 0 - self.total = 
get_body_size(params, boundary) - - def __iter__(self): - return self - - # since python 3 - def __next__(self): - return self.next() - - def next(self): - """generator function to yield multipart/form-data representation of - parameters.""" - if self.param_iter is not None: - try: - block = next(self.param_iter) - self.current_transferred += len(block) - if self.callback: - self.callback(self.current_part, - self.current_transferred, self.total) - return block - except StopIteration: - self.current_part = None - self.param_iter = None - - if self.i is None: - raise StopIteration - elif self.i >= len(self.params): - self.param_iter = None - self.current_part = None - self.i = None - block = "--%s--\r\n" % self.boundary - self.current_transferred += len(block) - if self.callback: - self.callback(self.current_part, - self.current_transferred, self.total) - return block - - self.current_part = self.params[self.i] - self.param_iter = self.current_part.iter_encode(self.boundary) - self.i += 1 - return next(self) - - def reset(self): - """Reset the iterator.""" - self.i = 0 - self.current_transferred = 0 - for param in self.params: - param.reset() - - -def multipart_encode(params, boundary=None, callback=None): - """Encode ``params`` as multipart/form-data. - - ``params`` should be a sequence of (name, value) pairs or MultipartParam - objects, or a mapping of names to values. - Values are either strings parameter values, or file-like objects to use as - the parameter value. The file-like objects must support .read() and either - .fileno() or both .seek() and .tell(). - - If ``boundary`` is set, then it as used as the MIME boundary. Otherwise - a randomly generated boundary will be used. In either case, if the - boundary string appears in the parameter values a ValueError will be - raised. - - If ``callback`` is set, it should be a callback which will get called as blocks - of data are encoded. 
It will be called with (param, current_transferred, total), - indicating the current parameter being encoded, the current amount encoded, - and the total amount to encode. - - Returns a tuple of `datagen`, `headers`, where `datagen` is a - generator that will yield blocks of data that make up the encoded - parameters, and `headers` is a dictionary with the assoicated - Content-Type and Content-Length headers. - - Examples: - - >>> datagen, headers = multipart_encode( [("key", "value1"), ("key", "value2")] ) - >>> s = "".join(datagen) - >>> assert "value2" in s and "value1" in s - - >>> p = MultipartParam("key", "value2") - >>> datagen, headers = multipart_encode( [("key", "value1"), p] ) - >>> s = "".join(datagen) - >>> assert "value2" in s and "value1" in s - - >>> datagen, headers = multipart_encode( {"key": "value1"} ) - >>> s = "".join(datagen) - >>> assert "value2" not in s and "value1" in s - """ - if boundary is None: - boundary = gen_boundary() - else: - boundary = quote_plus(boundary) - - headers = get_headers(params, boundary) - params = MultipartParam.from_params(params) - - return MultipartYielder(params, boundary, callback), headers - - -class ReadableMultiparts(object): - """Wraps instances of the MultipartYielder class as a readable and withable - object.""" - - def __init__(self, multipart_yielder): - self.multipart_yielder = multipart_yielder - self.current_block = None - self.left_over = b'' - - def read(self, size=-1): - result = self.left_over - while size == -1 or len(result) < size: - try: - next_chunk = self.multipart_yielder.next() - if hasattr(next_chunk, "encode"): - next_chunk = next_chunk.encode("utf8") - result += next_chunk - except StopIteration: - break - - if size == -1: - self.left_over = b'' - return result - - self.left_over = result[size:] - return result[:size] - - def __enter__(self): - pass - - def __exit__(self, type, value, traceback): - self.close() - - def close(self): - self.multipart_yielder.reset() +warn(("CaosDB was 
renamed to LinkAhead. Please import this library as `import linkahead.connection.encode`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/interface.py b/src/caosdb/connection/interface.py index a6f739171597a848356321578f4a4065a2e94006..77e4e57c31035bec3f133227064b8cf3b0645670 100644 --- a/src/caosdb/connection/interface.py +++ b/src/caosdb/connection/interface.py @@ -1,97 +1,6 @@ -# -*- encoding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""This module defines the CaosDBServerConnection interface.""" -from abc import ABCMeta, abstractmethod, abstractproperty -# meta class compatible with Python 2 *and* 3: -ABC = ABCMeta('ABC', (object, ), {'__slots__': ()}) +from linkahead.connection.interface import * +from warnings import warn - -class CaosDBHTTPResponse(ABC): - """An abstract class which defines a minimal interface for responses of the - CaosDBServer.""" - - @abstractmethod - def read(self, size=-1): - """Read up to *size* bytes from the response body. - - If size is unspecified or -1, all bytes until EOF are returned. 
- """ - - @abstractmethod - def getheader(self, name, default=None): - """Return the value of the header *name* or the value of *default* if - there is no such header. - - If there are multiple headers with that name, return all of the - values joined by ', '. If default is an iterable, its elements - are returned likewise. - """ - - @abstractproperty - def status(self): - """Status code of the response.""" - - @abstractmethod - def getheaders(self): - """Return all headers.""" - - def __enter__(self): - pass - - def __exit__(self, type, value, traceback): - self.close() - - @abstractmethod - def close(self): - """close. - - Close this response. Depending on the implementation this might - also close underlying streams, sockets etc. - """ - - -class CaosDBServerConnection(ABC): - """Abstract class which defines the interface for sending requests to the - caosdb server.""" - - @abstractmethod - def request(self, method, path, headers=None, body=None, **kwargs): - """Abstract method. Implement this method for HTTP requests to the - caosdb server. - - Returns - ------- - CaosDBHTTPResponse - """ - - @abstractmethod - def configure(self, **kwargs): - """Configure the connection. This method is to be called by - configure_connection. - - Returns - ------- - None - """ +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.connection.interface`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/mockup.py b/src/caosdb/connection/mockup.py index b37670b867cd88cf47e64084c6ccc802cad463b4..3adff45fc7d9a19bd06617a9be4169fe4b7fc657 100644 --- a/src/caosdb/connection/mockup.py +++ b/src/caosdb/connection/mockup.py @@ -1,100 +1,6 @@ -# -*- encoding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. 
-# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""Classes and functions to immitate a caosdb server without actually setting -up a tcp connection.""" -from __future__ import unicode_literals, print_function -from io import StringIO -from .connection import CaosDBServerConnection, CaosDBHTTPResponse +from linkahead.connection.mockup import * +from warnings import warn - -def _request_log_message(method, path, headers, body, **kwargs): - return "{method}: {path}{nl0}{kwargs}{nl1}{headers}{nl2}{body}".format( - method=method, - path=path, - headers=headers if headers else "", - body=body if body else "", - kwargs=kwargs if kwargs else "", - nl0="\n" if kwargs else "", - nl1="\n" if headers else "", - nl2="\n" if body else "") - - -class MockUpResponse(CaosDBHTTPResponse): - """A class for the responses of a request to the MockUpServerConnection.""" - - def __init__(self, status, headers, body): - self._status = status - self.headers = headers - self.response = StringIO(body) - - @property - def status(self): - """Return the status of the response.""" - return self._status - - def read(self, size=-1): - """Return the body of the response.""" - return self.response.read(size).encode() - - def getheader(self, 
name, default=None): - """Get the contents of the header `name`, or `default` if there is no - matching header.""" - return self.headers[name] if name in self.headers else default - - def getheaders(self): - return self.headers - - def close(self): - pass - - -class MockUpServerConnection(CaosDBServerConnection): - """The mock-up connection which does not actually connect to anything but - just returns predefined responses which mimic the caosdb server.""" - - def __init__(self): - self.resources = [self._login] - - def _login(self, method, path, headers, body): - if method == "POST" and path == "login": - return MockUpResponse(200, - headers={"AuthToken": - "mockup-auth-token"}, - body="") - - def configure(self, **kwargs): - """This configure method does nothing.""" - - def request(self, method, path, headers=None, body=None, **kwargs): - """Search a resource in the `resources` list which is answering to the - request and return the response or raise a RuntimeError.""" - for resource in self.resources: - response = resource( - method=method, path=path, headers=headers, body=body, **kwargs) - if response: - return response - raise RuntimeError( - "No response for this request - " + - _request_log_message(method, path, headers, body, **kwargs)) +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.connection.mockup`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/connection/utils.py b/src/caosdb/connection/utils.py index 095d47035e24dad5b6d7041f5d3b8a739652f271..9a6b5d9709d4daf213de902d1145e715240676ea 100644 --- a/src/caosdb/connection/utils.py +++ b/src/caosdb/connection/utils.py @@ -1,190 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. 
-# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""Utility functions for the connection module.""" -from __future__ import unicode_literals, print_function -from builtins import str as unicode -from urllib.parse import (urlencode as _urlencode, quote as _quote, - urlparse, urlunparse, unquote as _unquote) -import re +from linkahead.connection.utils import * +from warnings import warn -def urlencode(query): - """Convert a dict of into a url-encoded (unicode) string. - - This is basically a python2/python3 compatibility wrapper for the respective - functions in the urllib module with some slight modifications. - - 1) A `None` is translated to an empty string. - >>> urlencode({'key': None}) - 'key=' - - 2) Unicode strings are allowed for python2. - >>> urlencode({'kèy': 'välüe'}) - 'k%C3%A8y=v%C3%A4l%C3%BCe' - - 3) All other parameters which can be passed to the respective functions are - not implemented here and the default parameters will be used. - -.. code:: - - >>> urlencode({'key': ['val1', 'val2']}, doseq=True) - Traceback (most recent call last): - ... 
- TypeError: urlencode() got an unexpected keyword argument 'doseq' - -Otherwise, this functions works exactly as its counterparts in the urllib -modules when they are called with only the query parameter. - - Parameters - ---------- - query : dict - A dict of str or unicode keys with None, unicode or str values. - - Returns - ------- - str - A series of `key=value` pairs separated by `&`. - - Raises - ------ - AttributeError - If a key or a value does not have a 'encode' method. - """ - return str( - _urlencode({ - key.encode("utf-8"): (val.encode("utf-8") - if val is not None else '') - for key, val in query.items() - })) - - -def make_uri_path(segments=None, query=None): - """Url-encode all segments, concat them with slashes and append the query. - - Examples - -------- - >>> make_uri_path(['a','b']) - 'a/b' - >>> make_uri_path(['a','ö']) - 'a/%C3%B6' - >>> make_uri_path(['a','b'], {'key': 'val'}) - 'a/b?key=val' - - Parameters - ---------- - segments : list of str - The segments of the path - query: dict - A dict of str keys with None or str values. - - Returns - ------- - str - A relative uri path (no host information, possibly no root path). - """ - path_no_query = ("/".join([quote(segment) for segment in segments]) - if segments else "") - return str(path_no_query if query is None else "?".join([ - path_no_query, "&".join([ - quote(key) + "=" + - (quote(query[key]) if query[key] is not None else "") - for key in query - ]) - ])) - - -def quote(string): - enc = string.encode('utf-8') - return _quote(enc).replace('/', '%2F') - - -def parse_url(url): - fullurl = urlparse(url) - # make sure the path ends with a slash - if not fullurl.path.endswith("/"): - parse_result = list(fullurl) - parse_result[2] += "/" - fullurl = urlparse(urlunparse(parse_result)) - return fullurl - - -_PATTERN = re.compile(r"^SessionToken=([^;]*);.*$") - - -def unquote(string): - """unquote. - - Decode an urlencoded string into a plain text string. 
- """ - bts = _unquote(string) - if hasattr(bts, "decode"): - # python 2 - return bts.decode("utf-8") - return bts - - -def parse_auth_token(cookie): - """parse_auth_token. - - Parse an auth token from a cookie. - - Parameters - ---------- - cookie : str - A cookie with an urlencoded authtoken. - - Returns - ------- - str - An auth token string. - """ - auth_token = None - if cookie is not None and _PATTERN.match(cookie): - auth_token = unquote(_PATTERN.split(cookie)[1]) - return auth_token - - -def auth_token_to_cookie(auth_token): - """auth_token_to_cookie. - - Urlencode an auth token string and format it as a cookie. - - Parameters - ---------- - auth_token : str - The plain auth token string. - - Raises - ------ - TypeError - If the auth_token was None - - Returns - ------- - str - A cookie - """ - if auth_token is None: - raise TypeError("Parameter `auth_token` was None.") - return "SessionToken=" + quote(auth_token) + ";" +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.connection.utils`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/exceptions.py b/src/caosdb/exceptions.py index ebbb52565ca7e95b064664da22797489c0d4d422..57964faa53fee10f66881dcf5e182dd0c06b7b87 100644 --- a/src/caosdb/exceptions.py +++ b/src/caosdb/exceptions.py @@ -1,359 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# Copyright (C) 2020 Indiscale GmbH <info@indiscale.com> -# Copyright (C) 2020 Florian Spreckelsen <f.spreckelsen@indiscale.com> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""The exceptions module defines exceptions for HTTP Errors (4xx and 5xx and -HTTP response codes) and for transaction errors (i.e. missing permissions, -dependencies, non-passing consistency checks etc.). -""" -from lxml import etree +from linkahead.exceptions import * +from warnings import warn - -class CaosDBException(Exception): - """Base class of all CaosDB exceptions.""" - - def __init__(self, msg): - Exception.__init__(self, msg) - self.msg = msg - - -class ConfigurationError(CaosDBException): - """ConfigurationError. - - Indicates a misconfiguration. - - Parameters - ---------- - msg : str - A descriptin of the misconfiguration. The constructor adds - a few lines with explainingg where to find the configuration. - - Attributes - ---------- - msg : str - A description of the misconfiguration. - """ - - def __init__(self, msg): - super().__init__(msg + ConfigurationError._INFO) - - _INFO = ("\n\nPlease check your ~/.pycaosdb.ini and your $PWD/" - ".pycaosdb.ini. Does at least one of them exist and are they correct?") - - -class ServerConfigurationException(CaosDBException): - """The server is configured in a different way than expected. - - This can be for example unexpected flags or settings or missing extensions. 
- """ - - -class HTTPClientError(CaosDBException): - """HTTPClientError represents 4xx HTTP client errors.""" - - def __init__(self, msg, status, body): - self.status = status - self.body = body - CaosDBException.__init__(self, msg) - - -class HTTPServerError(CaosDBException): - """HTTPServerError represents 5xx HTTP server errors.""" - - def __init__(self, body): - xml = etree.fromstring(body) - error = xml.xpath('/Response/Error')[0] - msg = error.get("description") - - if error.text is not None: - msg = msg + "\n\n" + error.text - CaosDBException.__init__(self, msg) - - -class CaosDBConnectionError(CaosDBException): - """Connection is not configured or the network is down.""" - - def __init__(self, msg=None): - CaosDBException.__init__(self, msg) - - -class HTTPURITooLongError(HTTPClientError): - """The URI of the last request was too long.""" - - def __init__(self, msg=None): - HTTPClientError.__init__(self, msg=msg, status=414, body=None) - - -class LoginFailedError(CaosDBException): - """Login failed. - - Probably, your username/password pair is wrong. - """ - - def __init__(self, msg=None): - CaosDBException.__init__(self, msg=msg) - - -class HTTPForbiddenError(HTTPClientError): - """You're lacking the required permissions. Corresponds to HTTP status - 403. - - """ - - def __init__(self, msg=None): - HTTPClientError.__init__(self, msg=msg, status=403, body=None) - - -class HTTPResourceNotFoundError(HTTPClientError): - """The requested resource doesn't exist; corresponds to HTTP status - 404. 
- - """ - - def __init__(self, msg=None): - HTTPClientError.__init__(self, msg=msg, status=404, body=None) - - -class MismatchingEntitiesError(CaosDBException): - """Mismatching entities were found during container sync.""" - - -# ######################### Bad query errors ########################### - -class BadQueryError(CaosDBException): - """Base class for query errors that are not transaction errors.""" - - -class PagingConsistencyError(BadQueryError): - """The database state changed between two consecutive paged requests of the - same query.""" - - -class QueryNotUniqueError(BadQueryError): - """A unique query or retrieve found more than one entity.""" - - -class EmptyUniqueQueryError(BadQueryError): - """A unique query or retrieve dound no result.""" - - -# ######################### Transaction errors ######################### - - -class TransactionError(CaosDBException): - """An error of this type is raised whenever any transaction fails with - one or more entities between client and CaosDB server. More - detailed errors are collected as direct and indirect children in - the 'errors' list (direct children) and the 'all_errors' set (set - of all direct and indirect children). - - """ - - def __init__(self, error=None, - msg="An error occured during the transaction.", - container=None): - CaosDBException.__init__(self, msg=msg) - self.errors = [] - self.all_errors = set() - self.entities = [] - self.all_entities = set() - self.container = container - # special case of faulty container - if container is not None and container.get_errors() is not None: - self.code = container.get_errors()[0].code - else: - self.code = None - if error is not None: - self.add_error(error) - - def has_error(self, error_t, direct_children_only=False): - """Check whether this transaction error contains an error of type - error_t. If direct_children_only is True, only direct children - are checked. 
- - Parameters - ---------- - error_t : EntityError - error type to be checked - direct_children_only: bool, optional - If True, only direct children, i.e., all errors in - self.errors are checked. Else all direct and indirect - children, i.e., all errors in self.all_errors are - used. Default is false. - - Returns - ------- - has_error : bool - True if at least one of the children is of type error_t, - False otherwise. - - """ - - test_set = self.errors if direct_children_only else self.all_errors - return any([isinstance(err, error_t) for err in test_set]) - - def add_error(self, error): - """Add an error as a direct child to this TransactionError. - - @param error: An EntityError or a list of EntityErrors. - - @raise TypeError: If and only if the 'error' parameter is not an - instance of EntityError. - - @return: self. - """ - - if hasattr(error, "__iter__"): - for err in error: - self.add_error(err) - - return self - elif isinstance(error, EntityError): - self.errors.append(error) - self.entities.append(error.entity) - - self.all_errors.add(error) - self.all_errors.update(error.all_errors) - self.all_entities.add(error.entity) - self.all_entities.update(error.all_entities) - - return self - else: - raise TypeError( - "Argument is to be an EntityError or a list of EntityErrors.") - - def _repr_reasons(self, indent): - if self.errors is not None and len(self.errors) > 0: - ret = "\n" + indent + " +--| REASONS |--" - - for err in self.errors: - ret += '\n' + indent + ' | -> ' + \ - err.__str__(indent=indent + ' |') - ret += "\n" + indent + " +----------------" - - return ret - else: - return '' - - def _repr_head(self, indent): - return indent + str(type(self).__name__) + ( - (': ' + self.msg) - if hasattr(self, 'msg') and self.msg is not None - else '' - ) - - def __str__(self, indent=''): - ret = self._repr_head(indent=indent) - ret += self._repr_reasons(indent=indent) - - return ret - - def __repr__(self): - return self.__str__() - - -class 
EntityError(TransactionError): - """This is the most basic entity error. It is constructed using an - entity that caused the error and the error message attached by the - server. - - """ - - def __init__(self, error=None, entity=None): - TransactionError.__init__(self) - self.error = error - if hasattr(error, "code"): - self.code = error.code - else: - self.code = None - self.entity = entity - - if error is not None and hasattr(error, "encode"): - self.msg = error - elif error is not None and hasattr(error, 'description'): - self.msg = error.description - elif error is None: - self.msg = None - else: - self.msg = str(error) - - @property - def description(self): - """The description of the error.""" - return self.error.description if self.error is not None else None - - def _repr_head(self, indent): - if hasattr(self, 'entity') and self.entity is not None: - return (str(type(self.entity).__name__).upper() + " (id: " + - str(self.entity.id) + ((", name: " + "'" + str(self.entity.name) + "'") if - self.entity.name is not None else '') + ") CAUSED " + - TransactionError._repr_head(self, indent)) - else: - return TransactionError._repr_head(self, indent) - - -class UniqueNamesError(EntityError): - """A name was supposed to be unique but was not.""" - - -class UnqualifiedParentsError(EntityError): - """This entity has unqualified parents (see 'errors' attribute for a - list of errors of the parent entities or 'entities' attribute for - a list of parent entities with errors). - - """ - - -class UnqualifiedPropertiesError(EntityError): - """This entity has unqualified properties (see 'errors' attribute for - a list of errors of the properties or 'entities' attribute for a - list of properties with errors). 
- - """ - - -class EntityDoesNotExistError(EntityError): - """This entity does not exist.""" - - -class EntityHasNoDatatypeError(EntityError): - """This has to have a data type.""" - - -class ConsistencyError(EntityError): - """The transaction violates database consistency.""" - - -class AuthorizationError(EntityError): - """You are not allowed to do what ever you tried to do. - - Maybe you need more privileges or a user account. - """ - - -class AmbiguousEntityError(EntityError): - """A retrieval of the entity was not possible because there is more - than one possible candidate. - """ +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.exceptions`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/high_level_api.py b/src/caosdb/high_level_api.py index 3509a7b6bfe7ec322f2e0d2590334c6fc6f02cf8..458e7d8dd4d16fd82f2c139edaabef174f221766 100644 --- a/src/caosdb/high_level_api.py +++ b/src/caosdb/high_level_api.py @@ -1,1053 +1,6 @@ -# -*- coding: utf-8 -*- -# -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> -# Copyright (C) 2020-2022 IndiScale GmbH <info@indiscale.com> -# Copyright (C) 2022 Alexander Schlemmer <alexander.schlemmer@ds.mpg.de> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. 
-# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -""" -A high level API for accessing CaosDB entities from within python. +from linkahead.high_level_api import * +from warnings import warn -This is refactored from apiutils. -""" - -from caosdb.common.datatype import (BOOLEAN, DATETIME, DOUBLE, FILE, INTEGER, - REFERENCE, TEXT, - is_list_datatype, - get_list_datatype, - is_reference) -import caosdb as db - -from .apiutils import get_type_of_entity_with, create_flat_list -import warnings - -from typing import Any, Optional, List, Union, Dict - -import yaml - -from dataclasses import dataclass, fields -from datetime import datetime -from dateutil import parser - -warnings.warn("""EXPERIMENTAL! The high_level_api module is experimental and may be changed or -removed in the future. Its purpose is to give an impression on how the Python client user interface -might be changed.""") - - -def standard_type_for_high_level_type(high_level_record: "CaosDBPythonEntity", - return_string: bool = False): - """ - For a given CaosDBPythonEntity either return the corresponding - class in the standard CaosDB API or - if return_string is True - return - the role as a string. 
- """ - if type(high_level_record) == CaosDBPythonRecord: - if not return_string: - return db.Record - return "Record" - elif type(high_level_record) == CaosDBPythonFile: - if not return_string: - return db.File - return "File" - elif type(high_level_record) == CaosDBPythonProperty: - if not return_string: - return db.Property - return "Property" - elif type(high_level_record) == CaosDBPythonRecordType: - if not return_string: - return db.RecordType - return "RecordType" - elif type(high_level_record) == CaosDBPythonEntity: - if not return_string: - return db.Entity - return "Entity" - raise RuntimeError("Incompatible type.") - - -def high_level_type_for_role(role: str): - if role == "Record": - return CaosDBPythonRecord - if role == "File": - return CaosDBPythonFile - if role == "Property": - return CaosDBPythonProperty - if role == "RecordType": - return CaosDBPythonRecordType - if role == "Entity": - return CaosDBPythonEntity - raise RuntimeError("Unknown role.") - - -def high_level_type_for_standard_type(standard_record: db.Entity): - if not isinstance(standard_record, db.Entity): - raise ValueError() - role = standard_record.role - if role == "Record" or type(standard_record) == db.Record: - return CaosDBPythonRecord - elif role == "File" or type(standard_record) == db.File: - return CaosDBPythonFile - elif role == "Property" or type(standard_record) == db.Property: - return CaosDBPythonProperty - elif role == "RecordType" or type(standard_record) == db.RecordType: - return CaosDBPythonRecordType - elif role == "Entity" or type(standard_record) == db.Entity: - return CaosDBPythonEntity - raise RuntimeError("Incompatible type.") - - -@dataclass -class CaosDBPropertyMetaData: - # name is already the name of the attribute - unit: Optional[str] = None - datatype: Optional[str] = None - description: Optional[str] = None - id: Optional[int] = None - importance: Optional[str] = None - - -class CaosDBPythonUnresolved: - pass - - -@dataclass -class 
CaosDBPythonUnresolvedParent(CaosDBPythonUnresolved): - """ - Parents can be either given by name or by ID. - - When resolved, both fields should be set. - """ - - id: Optional[int] = None - name: Optional[str] = None - - -@dataclass -class CaosDBPythonUnresolvedReference(CaosDBPythonUnresolved): - - def __init__(self, id=None): - self.id = id - - -class CaosDBPythonEntity(object): - - def __init__(self): - """ - Initialize a new CaosDBPythonEntity for the high level python api. - - Parents are either unresolved references or CaosDB RecordTypes. - - Properties are stored directly as attributes for the object. - Property metadata is maintained in a dctionary _properties_metadata that should - never be accessed directly, but only using the get_property_metadata function. - If property values are references to other objects, they will be stored as - CaosDBPythonUnresolvedReference objects that can be resolved later into - CaosDBPythonRecords. - """ - - # Parents are either unresolved references or CaosDB RecordTypes - self._parents: List[Union[ - CaosDBPythonUnresolvedParent, CaosDBPythonRecordType]] = [] - # self._id: int = CaosDBPythonEntity._get_new_id() - self._id: Optional[int] = None - self._name: Optional[str] = None - self._description: Optional[str] = None - self._version: Optional[str] = None - - self._file: Optional[str] = None - self._path: Optional[str] = None - - # name: name of property, value: property metadata - self._properties_metadata: Dict[CaosDBPropertyMetaData] = dict() - - # Store all current attributes as forbidden attributes - # which must not be changed by the set_property function. - self._forbidden = dir(self) + ["_forbidden"] - - def use_parameter(self, name, value): - self.__setattr__(name, value) - return value - - @property - def id(self): - """ - Getter for the id. - """ - return self._id - - @id.setter - def id(self, val: int): - self._id = val - - @property - def name(self): - """ - Getter for the name. 
- """ - return self._name - - @name.setter - def name(self, val: str): - self._name = val - - @property - def file(self): - """ - Getter for the file. - """ - if type(self) != CaosDBPythonFile: - raise RuntimeError("Please don't use the file attribute for entities" - " that are no files.") - return self._file - - @file.setter - def file(self, val: str): - if val is not None and type(self) != CaosDBPythonFile: - raise RuntimeError("Please don't use the file attribute for entities" - " that are no files.") - self._file = val - - @property - def path(self): - """ - Getter for the path. - """ - if type(self) != CaosDBPythonFile: - raise RuntimeError("Please don't use the path attribute for entities" - " that are no files.") - return self._path - - @path.setter - def path(self, val: str): - if val is not None and type(self) != CaosDBPythonFile: - raise RuntimeError("Please don't use the path attribute for entities" - " that are no files.") - self._path = val - - @property - def description(self): - """ - Getter for the description. - """ - return self._description - - @description.setter - def description(self, val: str): - self._description = val - - @property - def version(self): - """ - Getter for the version. - """ - return self._version - - @version.setter - def version(self, val: str): - self._version = val - - def _set_property_from_entity(self, ent: db.Entity, importance: str, - references: Optional[db.Container], - visited: Dict[int, "CaosDBPythonEntity"]): - """ - Set a new property using an entity from the normal python API. - - ent : db.Entity - The entity to be set. 
- """ - - if ent.name is None: - raise RuntimeError("Setting properties without name is impossible.") - - if ent.name in self.get_properties(): - raise RuntimeError("Multiproperty not implemented yet.") - - val = self._type_converted_value(ent.value, ent.datatype, - references, visited) - self.set_property( - ent.name, - val, - datatype=ent.datatype) - metadata = self.get_property_metadata(ent.name) - - for prop_name in fields(metadata): - k = prop_name.name - if k == "importance": - metadata.importance = importance - else: - metadata.__setattr__(k, ent.__getattribute__(k)) - - def get_property_metadata(self, prop_name: str) -> CaosDBPropertyMetaData: - """ - Retrieve the property metadata for the property with name prop_name. - - If the property with the given name does not exist or is forbidden, raise an exception. - Else return the metadata associated with this property. - - If no metadata does exist yet for the given property, a new object will be created - and returned. - - prop_name: str - Name of the property to retrieve metadata for. - """ - - if not self.property_exists(prop_name): - raise RuntimeError("The property with name {} does not exist.".format(prop_name)) - - if prop_name not in self._properties_metadata: - self._properties_metadata[prop_name] = CaosDBPropertyMetaData() - - return self._properties_metadata[prop_name] - - def property_exists(self, prop_name: str): - """ - Check whether a property exists already. - """ - return prop_name not in self._forbidden and prop_name in self.__dict__ - - def set_property(self, - name: str, - value: Any, - overwrite: bool = False, - datatype: Optional[str] = None): - """ - Set a property for this entity with a name and a value. - - If this property is already set convert the value into a list and append the value. - This behavior can be overwritten using the overwrite flag, which will just overwrite - the existing value. - - name: str - Name of the property. - - value: Any - Value of the property. 
- - overwrite: bool - Use this if you definitely only want one property with - that name (set to True). - """ - - if name in self._forbidden: - raise RuntimeError("Entity cannot be converted to a corresponding " - "Python representation. Name of property " + - name + " is forbidden!") - - already_exists = self.property_exists(name) - - if already_exists and not overwrite: - # each call to set_property checks first if it already exists - # if yes: Turn the attribute into a list and - # place all the elements into that list. - att = self.__getattribute__(name) - - if isinstance(att, list): - # just append, see below - pass - else: - old_att = self.__getattribute__(name) - self.__setattr__(name, [old_att]) - att = self.__getattribute__(name) - att.append(value) - else: - self.__setattr__(name, value) - - def __setattr__(self, name: str, val: Any): - """ - Allow setting generic properties. - """ - - # TODO: implement checking the value to correspond to one of the datatypes - # known for conversion. - - super().__setattr__(name, val) - - def _type_converted_list(self, - val: List, - pr: str, - references: Optional[db.Container], - visited: Dict[int, "CaosDBPythonEntity"]): - """ - Convert a list to a python list of the correct type. - - val: List - The value of a property containing the list. - - pr: str - The datatype according to the database entry. - """ - if not is_list_datatype(pr) and not isinstance(val, list): - raise RuntimeError("Not a list.") - - return [ - self._type_converted_value(i, get_list_datatype(pr), references, - visited) for i in val] - - def _type_converted_value(self, - val: Any, - pr: str, - references: Optional[db.Container], - visited: Dict[int, "CaosDBPythonEntity"]): - """ - Convert val to the correct type which is indicated by the database - type string in pr. - - References with ids will be turned into CaosDBPythonUnresolvedReference. 
- """ - - if val is None: - return None - elif isinstance(val, db.Entity): - # this needs to be checked as second case as it is the ONLY - # case which does not depend on pr - # TODO: we might need to pass through the reference container - return convert_to_python_object(val, references, visited) - elif isinstance(val, list): - return self._type_converted_list(val, pr, references, visited) - elif pr is None: - return val - elif pr == DOUBLE: - return float(val) - elif pr == BOOLEAN: - return bool(val) - elif pr == INTEGER: - return int(val) - elif pr == TEXT: - return str(val) - elif pr == FILE: - return CaosDBPythonUnresolvedReference(val) - elif pr == REFERENCE: - return CaosDBPythonUnresolvedReference(val) - elif pr == DATETIME: - return self._parse_datetime(val) - elif is_list_datatype(pr): - return self._type_converted_list(val, pr, references, visited) - else: - # Generic references to entities: - return CaosDBPythonUnresolvedReference(val) - - def _parse_datetime(self, val: Union[str, datetime]): - """ - Convert val into a datetime object. - """ - if isinstance(val, datetime): - return val - return parser.parse(val) - - def get_property(self, name: str): - """ - Return the value of the property with name name. - - Raise an exception if the property does not exist. - """ - if not self.property_exists(name): - raise RuntimeError("Property {} does not exist.".format(name)) - att = self.__getattribute__(name) - return att - - def attribute_as_list(self, name: str): - """ - This is a workaround for the problem that lists containing only one - element are indistinguishable from simple types in this - representation. - - TODO: still relevant? seems to be only a problem if LIST types are not used. - """ - att = self.get_property(name) - - if isinstance(att, list): - return att - else: - return [att] - - def add_parent(self, parent: Union[ - CaosDBPythonUnresolvedParent, "CaosDBPythonRecordType", str]): - """ - Add a parent to this entity. 
Either using an unresolved parent or - using a real record type. - - Strings as argument for parent will automatically be converted to an - unresolved parent. Likewise, integers as argument will be automatically converted - to unresolved parents with just an id. - """ - - if isinstance(parent, str): - parent = CaosDBPythonUnresolvedParent(name=parent) - - if isinstance(parent, int): - parent = CaosDBPythonUnresolvedParent(id=parent) - - if self.has_parent(parent): - raise RuntimeError("Duplicate parent.") - self._parents.append(parent) - - def get_parents(self): - """ - Returns all parents of this entity. - - Use has_parent for checking for existence of parents - and add_parent for adding parents to this entity. - """ - return self._parents - - def has_parent(self, parent: Union[ - CaosDBPythonUnresolvedParent, "CaosDBPythonRecordType"]): - """ - Check whether this parent already exists for this entity. - - Strings as argument for parent will automatically be converted to an - unresolved parent. Likewise, integers as argument will be automatically converted - to unresolved parents with just an id. 
- """ - - if isinstance(parent, str): - parent = CaosDBPythonUnresolvedParent(name=parent) - - if isinstance(parent, int): - parent = CaosDBPythonUnresolvedParent(id=parent) - - for p in self._parents: - if p.id is not None and p.id == parent.id: - return True - elif p.name is not None and p.name == parent.name: - return True - return False - - def _resolve_caosdb_python_unresolved_reference(self, propval, deep, - references, visited): - # This does not make sense for unset ids: - if propval.id is None: - raise RuntimeError("Unresolved property reference without an ID.") - # have we encountered this id before: - if propval.id in visited: - # self.__setattr__(prop, visited[propval.id]) - # don't do the lookup in the references container - return visited[propval.id] - - if references is None: - ent = db.Entity(id=propval.id).retrieve() - obj = convert_to_python_object(ent, references) - visited[propval.id] = obj - if deep: - obj.resolve_references(deep, references, visited) - return obj - - # lookup in container: - for ent in references: - # Entities in container without an ID will be skipped: - if ent.id is not None and ent.id == propval.id: - # resolve this entity: - obj = convert_to_python_object(ent, references) - visited[propval.id] = obj - # self.__setattr__(prop, visited[propval.id]) - if deep: - obj.resolve_references(deep, references, visited) - return obj - return propval - - def resolve_references(self, deep: bool, references: db.Container, - visited: Optional[Dict[Union[str, int], - "CaosDBPythonEntity"]] = None): - """ - Resolve this entity's references. This affects unresolved properties as well - as unresolved parents. - - deep: bool - If True recursively resolve references also for all resolved references. - - references: Optional[db.Container] - A container with references that might be resolved. - If None is passed as the container, this function tries to resolve entities from a running - CaosDB instance directly. 
- """ - - # This parameter is used in the recursion to keep track of already visited - # entites (in order to detect cycles). - if visited is None: - visited = dict() - - for parent in self.get_parents(): - # TODO - if isinstance(parent, CaosDBPythonUnresolvedParent): - pass - - for prop in self.get_properties(): - propval = self.__getattribute__(prop) - # Resolve all previously unresolved attributes that are entities: - if deep and isinstance(propval, CaosDBPythonEntity): - propval.resolve_references(deep, references) - elif isinstance(propval, list): - resolvedelements = [] - for element in propval: - if deep and isinstance(element, CaosDBPythonEntity): - element.resolve_references(deep, references) - resolvedelements.append(element) - if isinstance(element, CaosDBPythonUnresolvedReference): - resolvedelements.append( - self._resolve_caosdb_python_unresolved_reference(element, deep, - references, visited)) - else: - resolvedelements.append(element) - self.__setattr__(prop, resolvedelements) - - elif isinstance(propval, CaosDBPythonUnresolvedReference): - val = self._resolve_caosdb_python_unresolved_reference(propval, deep, - references, visited) - self.__setattr__(prop, val) - - def get_properties(self): - """ - Return the names of all properties. - """ - - return [p for p in self.__dict__ - if p not in self._forbidden] - - @staticmethod - def deserialize(serialization: dict): - """ - Deserialize a yaml representation of an entity in high level API form. 
- """ - - if "role" in serialization: - entity = high_level_type_for_role(serialization["role"])() - else: - entity = CaosDBPythonRecord() - - if "parents" in serialization: - for parent in serialization["parents"]: - if "unresolved" in parent: - id = None - name = None - if "id" in parent: - id = parent["id"] - if "name" in parent: - name = parent["name"] - entity.add_parent(CaosDBPythonUnresolvedParent( - id=id, name=name)) - else: - raise NotImplementedError( - "Currently, only unresolved parents can be deserialized.") - - for baseprop in ("name", "id", "description", "version"): - if baseprop in serialization: - entity.__setattr__(baseprop, serialization[baseprop]) - - if type(entity) == CaosDBPythonFile: - entity.file = serialization["file"] - entity.path = serialization["path"] - - for p in serialization["properties"]: - # The property needs to be set first: - - prop = serialization["properties"][p] - if isinstance(prop, dict): - if "unresolved" in prop: - entity.__setattr__(p, CaosDBPythonUnresolvedReference( - id=prop["id"])) - else: - entity.__setattr__(p, - entity.deserialize(prop)) - else: - entity.__setattr__(p, prop) - - # if there is no metadata in the yaml file just initialize an empty metadata object - if "metadata" in serialization and p in serialization["metadata"]: - metadata = serialization["metadata"][p] - propmeta = entity.get_property_metadata(p) - - for f in fields(propmeta): - if f.name in metadata: - propmeta.__setattr__(f.name, metadata[f.name]) - else: - pass - # raise NotImplementedError() - - return entity - - def serialize(self, without_metadata: bool = False, visited: dict = None): - """ - Serialize necessary information into a dict. - - without_metadata: bool - If True don't set the metadata field in order to increase - readability. Not recommended if deserialization is needed. 
- """ - - if visited is None: - visited = dict() - - if self in visited: - return visited[self] - - metadata: Dict[str, Any] = dict() - properties = dict() - parents = list() - - # The full information to be returned: - fulldict = dict() - visited[self] = fulldict - - # Add CaosDB role: - fulldict["role"] = standard_type_for_high_level_type(self, True) - - for parent in self._parents: - if isinstance(parent, CaosDBPythonEntity): - parents.append(parent.serialize(without_metadata, visited)) - elif isinstance(parent, CaosDBPythonUnresolvedParent): - parents.append({"name": parent.name, "id": parent.id, - "unresolved": True}) - else: - raise RuntimeError("Incompatible class used as parent.") - - for baseprop in ("name", "id", "description", "version"): - val = self.__getattribute__(baseprop) - if val is not None: - fulldict[baseprop] = val - - if type(self) == CaosDBPythonFile: - fulldict["file"] = self.file - fulldict["path"] = self.path - - for p in self.get_properties(): - m = self.get_property_metadata(p) - metadata[p] = dict() - for f in fields(m): - val = m.__getattribute__(f.name) - if val is not None: - metadata[p][f.name] = val - - val = self.get_property(p) - if isinstance(val, CaosDBPythonUnresolvedReference): - properties[p] = {"id": val.id, "unresolved": True} - elif isinstance(val, CaosDBPythonEntity): - properties[p] = val.serialize(without_metadata, visited) - elif isinstance(val, list): - serializedelements = [] - for element in val: - if isinstance(element, CaosDBPythonUnresolvedReference): - elm = dict() - elm["id"] = element.id - elm["unresolved"] = True - serializedelements.append(elm) - elif isinstance(element, CaosDBPythonEntity): - serializedelements.append( - element.serialize(without_metadata, - visited)) - else: - serializedelements.append(element) - properties[p] = serializedelements - else: - properties[p] = val - - fulldict["properties"] = properties - fulldict["parents"] = parents - - if not without_metadata: - fulldict["metadata"] = 
metadata - return fulldict - - def __str__(self): - return yaml.dump(self.serialize(False)) - - # This seemed like a good solution, but makes it difficult to - # compare python objects directly: - # - # def __repr__(self): - # return yaml.dump(self.serialize(True)) - - -class CaosDBPythonRecord(CaosDBPythonEntity): - pass - - -class CaosDBPythonRecordType(CaosDBPythonEntity): - pass - - -class CaosDBPythonProperty(CaosDBPythonEntity): - pass - - -class CaosDBMultiProperty: - """ - This implements a multi property using a python list. - """ - - def __init__(self): - raise NotImplementedError() - - -class CaosDBPythonFile(CaosDBPythonEntity): - def download(self, target=None): - if self.id is None: - raise RuntimeError("Cannot download file when id is missing.") - f = db.File(id=self.id).retrieve() - return f.download(target) - - -BASE_ATTRIBUTES = ( - "id", "name", "description", "version", "path", "file") - - -def _single_convert_to_python_object(robj: CaosDBPythonEntity, - entity: db.Entity, - references: Optional[db.Container] = None, - visited: Optional[Dict[int, - "CaosDBPythonEntity"]] = None): - """ - Convert a db.Entity from the standard API to a (previously created) - CaosDBPythonEntity from the high level API. - - This method will not resolve any unresolved references, so reference properties - as well as parents will become unresolved references in the first place. - - The optional third parameter can be used - to resolve references that occur in the converted entities and resolve them - to their correct representations. (Entities that are not found remain as - CaosDBPythonUnresolvedReferences.) - - Returns the input object robj. - """ - - # This parameter is used in the recursion to keep track of already visited - # entites (in order to detect cycles). 
- if visited is None: - visited = dict() - - if id(entity) in visited: - return visited[id(entity)] - else: - visited[id(entity)] = robj - - for base_attribute in BASE_ATTRIBUTES: - val = entity.__getattribute__(base_attribute) - if val is not None: - if isinstance(val, db.common.models.Version): - val = val.id - robj.__setattr__(base_attribute, val) - - for prop in entity.properties: - robj._set_property_from_entity(prop, entity.get_importance(prop), references, - visited) - - for parent in entity.parents: - robj.add_parent(CaosDBPythonUnresolvedParent(id=parent.id, - name=parent.name)) - - return robj - - -def _convert_property_value(propval): - if isinstance(propval, CaosDBPythonUnresolvedReference): - propval = propval.id - elif isinstance(propval, CaosDBPythonEntity): - propval = _single_convert_to_entity( - standard_type_for_high_level_type(propval)(), propval) - elif isinstance(propval, list): - propval = [_convert_property_value(element) for element in propval] - - # TODO: test case for list missing - - return propval - - -def _single_convert_to_entity(entity: db.Entity, - robj: CaosDBPythonEntity): - """ - Convert a CaosDBPythonEntity to an entity in standard pylib format. - - entity: db.Entity - An empty entity. - - robj: CaosDBPythonEntity - The CaosDBPythonEntity that is supposed to be converted to the entity. 
- """ - - for base_attribute in BASE_ATTRIBUTES: - if base_attribute in ("file", "path") and not isinstance(robj, CaosDBPythonFile): - continue - - # Skip version: - if base_attribute == "version": - continue - - val = robj.__getattribute__(base_attribute) - - if val is not None: - entity.__setattr__(base_attribute, val) - - for parent in robj.get_parents(): - if isinstance(parent, CaosDBPythonUnresolvedParent): - entity.add_parent(name=parent.name, id=parent.id) - elif isinstance(parent, CaosDBPythonRecordType): - raise NotImplementedError() - else: - raise RuntimeError("Incompatible class used as parent.") - - for prop in robj.get_properties(): - propval = robj.__getattribute__(prop) - metadata = robj.get_property_metadata(prop) - - propval = _convert_property_value(propval) - - entity.add_property( - name=prop, - value=propval, - unit=metadata.unit, - importance=metadata.importance, - datatype=metadata.datatype, - description=metadata.description, - id=metadata.id) - - return entity - - -def convert_to_entity(python_object): - if isinstance(python_object, db.Container): - # Create a list of objects: - - return [convert_to_entity(i) for i in python_object] - elif isinstance(python_object, CaosDBPythonRecord): - return _single_convert_to_entity(db.Record(), python_object) - elif isinstance(python_object, CaosDBPythonFile): - return _single_convert_to_entity(db.File(), python_object) - elif isinstance(python_object, CaosDBPythonRecordType): - return _single_convert_to_entity(db.RecordType(), python_object) - elif isinstance(python_object, CaosDBPythonProperty): - return _single_convert_to_entity(db.Property(), python_object) - elif isinstance(python_object, CaosDBPythonEntity): - return _single_convert_to_entity(db.Entity(), python_object) - else: - raise ValueError("Cannot convert an object of this type.") - - -def convert_to_python_object(entity: Union[db.Container, db.Entity], - references: Optional[db.Container] = None, - visited: Optional[Dict[int, - 
"CaosDBPythonEntity"]] = None): - """ - Convert either a container of CaosDB entities or a single CaosDB entity - into the high level representation. - - The optional second parameter can be used - to resolve references that occur in the converted entities and resolve them - to their correct representations. (Entities that are not found remain as - CaosDBPythonUnresolvedReferences.) - """ - if isinstance(entity, db.Container): - # Create a list of objects: - return [convert_to_python_object(i, references, visited) for i in entity] - - # TODO: recursion problems? - return _single_convert_to_python_object( - high_level_type_for_standard_type(entity)(), - entity, - references, - visited) - - -def new_high_level_entity(entity: db.RecordType, - importance_level: str, - name: Optional[str] = None): - """ - Create an new record in high level format based on a record type in standard format. - - entity: db.RecordType - The record type to initialize the new record from. - - importance_level: str - None, obligatory, recommended or suggested - Initialize new properties up to this level. - Properties in the record type with no importance will be added - regardless of the importance_level. - - name: str - Name of the new record. - """ - - r = db.Record(name=name) - r.add_parent(entity) - - impmap = { - None: 0, "SUGGESTED": 3, "RECOMMENDED": 2, "OBLIGATORY": 1} - - for prop in entity.properties: - imp = entity.get_importance(prop) - if imp is not None and impmap[importance_level] < impmap[imp]: - continue - - r.add_property(prop) - - return convert_to_python_object(r) - - -def create_record(rtname: str, name: Optional[str] = None, **kwargs): - """ - Create a new record based on the name of a record type. The new record is returned. - - rtname: str - The name of the record type. - - name: str - This is optional. A name for the new record. - - kwargs: - Additional arguments are used to set attributes of the - new record. 
- """ - obj = new_high_level_entity( - db.RecordType(name=rtname).retrieve(), "SUGGESTED", name) - for key, value in kwargs.items(): - obj.__setattr__(key, value) - return obj - - -def load_external_record(record_name: str): - """ - Retrieve a record by name and convert it to the high level API format. - """ - return convert_to_python_object(db.Record(name=record_name).retrieve()) - - -def create_entity_container(record: CaosDBPythonEntity): - """ - Convert this record into an entity container in standard format that can be used - to insert or update entities in a running CaosDB instance. - """ - ent = convert_to_entity(record) - lse: List[db.Entity] = [ent] - create_flat_list([ent], lse) - return db.Container().extend(lse) - - -def query(query: str, - resolve_references: Optional[bool] = True, - references: Optional[db.Container] = None): - """ - - """ - res = db.execute_query(query) - objects = convert_to_python_object(res) - if resolve_references: - for obj in objects: - obj.resolve_references(True, references) - return objects +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.high_level_api`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/utils/__init__.py b/src/caosdb/utils/__init__.py index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..7401e1775dfb5975e12a8be3b0b3c68e5d0d2477 100644 --- a/src/caosdb/utils/__init__.py +++ b/src/caosdb/utils/__init__.py @@ -0,0 +1,6 @@ + +from linkahead.utils import * +from warnings import warn + +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.utils`. 
Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/utils/caosdb_admin.py b/src/caosdb/utils/caosdb_admin.py old mode 100755 new mode 100644 index 09a8f64a3c6b9f0825089949840a8791604d1ded..d421c6fc7879a4e8aa14834746aeda4f4eced69b --- a/src/caosdb/utils/caosdb_admin.py +++ b/src/caosdb/utils/caosdb_admin.py @@ -1,657 +1,6 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. 
-# -# ** end header -# -"""A small caosdb client with a focus on administration of the server.""" +from linkahead.utils.caosdb_admin import * +from warnings import warn -from __future__ import print_function, unicode_literals - -import getpass -import sys -from argparse import ArgumentParser, RawDescriptionHelpFormatter - -import caosdb as db -from caosdb import administration as admin -from caosdb.exceptions import HTTPClientError - -__all__ = [] -__version__ = 0.3 -__date__ = '2016-09-19' -__updated__ = '2018-12-11' - - -def do_update_role(args): - admin._update_role(name=args.role_name, description=args.role_description) - - -def do_create_role(args): - admin._insert_role(name=args.role_name, description=args.role_description) - - -def do_retrieve_role(args): - print(admin._retrieve_role(name=args.role_name)) - - -def do_delete_role(args): - admin._delete_role(name=args.role_name) - - -def do_retrieve(args): - c = None - - if args.query: - if len(args.entities) > 1: - raise Exception("Only one query at a time can be retrieved.") - c = db.execute_query(args.entities[0], flags=eval(args.flags)) - else: - c = db.Container() - - for i in args.entities: - try: - eid = int(i) - c.append(db.Entity(id=eid)) - except ValueError: - c.append(db.Entity(name=i)) - c.retrieve(flags=eval(args.flags)) - print(c) - - -def do_update(args): - fdict = eval(args.flags) - xml = open(args.xml_path, "r") - ret = db.get_connection().update( - entity_uri_segment=["Entity"], reconnect=True, body=xml) - db.Container._response_to_entities(ret) - - -def do_delete(args): - c = db.Container() - - for i in args.entities: - c.append(db.Entity(id=i)) - - c.delete() - - -def do_insert(args): - fdict = eval(args.flags) - xml = open(args.xml_path, "r") - ret = db.get_connection().insert( - entity_uri_segment=["Entity"], - reconnect=True, - query_dict=fdict, - body=xml) - print(db.Container._response_to_entities(ret)) - - -def _promt_for_pw(): - password = getpass.getpass(prompt="Please type 
password: ") - password2 = getpass.getpass(prompt="Please type password again: ") - - if password != password2: - raise Exception("Password strings didn't match") - - return password - - -def do_create_user(args): - password = args.user_password - - if args.ask_password is True: - password = _promt_for_pw() - try: - admin._insert_user(name=args.user_name, - email=args.user_email, password=password) - - if args.activate_user: - do_activate_user(args) - except HTTPClientError as e: - print(e.msg) - - -def do_activate_user(args): - try: - admin._update_user(name=args.user_name, status="ACTIVE") - except HTTPClientError as e: - print(e.msg) - - -def do_deactivate_user(args): - try: - admin._update_user(name=args.user_name, status="INACTIVE") - except HTTPClientError as e: - print(e.msg) - - -def do_set_user_password(args): - if args.user_password is None: - password = _promt_for_pw() - else: - password = args.user_password - try: - admin._update_user(name=args.user_name, password=password) - except HTTPClientError as e: - print(e.msg) - - -def do_add_user_roles(args): - roles = admin._get_roles(username=args.user_name, realm=None) - - for r in args.user_roles: - roles.add(r) - admin._set_roles(username=args.user_name, roles=roles) - - -def do_remove_user_roles(args): - roles = admin._get_roles(username=args.user_name, realm=None) - - for r in args.user_roles: - if r in roles: - roles.remove(r) - admin._set_roles(username=args.user_name, roles=roles) - - -def do_set_user_entity(args): - admin._update_user(name=args.user_name, entity=args.user_entity) - - -def do_reset_user_entity(args): - admin._update_user(name=args.user_name, entity="") - - -def do_set_user_email(args): - admin._update_user(name=args.user_name, email=args.user_email) - - -def do_retrieve_user(args): - print(admin._retrieve_user(name=args.user_name)) - - -def do_delete_user(args): - admin._delete_user(name=args.user_name) - - -def do_retrieve_user_roles(args): - 
print(admin._get_roles(username=args.user_name)) - - -def do_retrieve_role_permissions(args): - print(admin._get_permissions(role=args.role_name)) - - -def do_grant_role_permissions(args): - perms = admin._get_permissions(args.role_name) - - for p in args.role_permissions: - g = admin.PermissionRule( - action="Grant", permission=p, priority=args.permissions_priority) - d = admin.PermissionRule( - action="Deny", permission=p, priority=args.permissions_priority) - - if g in perms: - perms.remove(g) - - if d in perms: - perms.remove(d) - perms.add(g) - admin._set_permissions(role=args.role_name, permission_rules=perms) - - -def do_revoke_role_permissions(args): - perms = admin._get_permissions(args.role_name) - - for p in args.role_permissions: - g = admin.PermissionRule( - action="Grant", permission=p, priority=args.permissions_priority) - d = admin.PermissionRule( - action="Deny", permission=p, priority=args.permissions_priority) - - if g in perms: - perms.remove(g) - - if d in perms: - perms.remove(d) - admin._set_permissions(role=args.role_name, permission_rules=perms) - - -def do_deny_role_permissions(args): - perms = admin._get_permissions(args.role_name) - - for p in args.role_permissions: - g = admin.PermissionRule( - action="Grant", permission=p, priority=args.permissions_priority) - d = admin.PermissionRule( - action="Deny", permission=p, priority=args.permissions_priority) - - if g in perms: - perms.remove(g) - - if d in perms: - perms.remove(d) - perms.add(d) - admin._set_permissions(role=args.role_name, permission_rules=perms) - - -def do_retrieve_entity_acl(args): - entities = db.execute_query(q=args.query, flags={"ACL": None}) - - for entity in entities: - print(entity.id) - print(entity.acl) - - -def do_action_entity_permissions(args): - entities = db.execute_query(q=args.query, flags={"ACL": None}) - - for entity in entities: - for p in args.permissions: - getattr(entity, args.action)(role=args.role, priority=args.priority, - permission=p) - 
entities.update(flags={"ACL": None}) - - for entity in entities: - print(entity.id) - print(entity.acl) - - -def main(argv=None): - """Command line options.""" - - if argv is None: - argv = sys.argv - else: - sys.argv.extend(argv) - - # program_name = os.path.basename(sys.argv[0]) - program_version = "v%s" % __version__ - program_build_date = str(__updated__) - program_version_message = '%%(prog)s %s (%s)' % ( - program_version, program_build_date) - program_shortdesc = __import__('__main__').__doc__ - program_license = '''%s - -USAGE -''' % (program_shortdesc) - - # Setup argument parser - parser = ArgumentParser(description=program_license, - formatter_class=RawDescriptionHelpFormatter) - parser.add_argument('-V', '--version', action='version', - version=program_version_message) - parser.add_argument("--auth-token", metavar="AUTH_TOKEN", - dest="auth_token", - help=("A CaosDB authentication token (default: None). " - "If the authentication token is passed, the " - "`password_method` of the connection is set to " - "`auth_token` and the respective configuration " - "from the pycaosdb.ini is effectively being " - "overridden.\nTODO: Also allow passing the token " - "via environmenty variables.")) - subparsers = parser.add_subparsers( - title="commands", - metavar="COMMAND", - description="You can invoke the following commands. Print the detailed help for each command with #> caosdb_admin COMMAND -h") - - # users (CRUD) - subparser = subparsers.add_parser( - "create_user", - help="Create a new user in caosdb's internal user database. You need " - " to activate the user before use.") - subparser.set_defaults(call=do_create_user) - mg = subparser.add_mutually_exclusive_group() - mg.add_argument("-a", "--ask-password", - help="Prompt for a password.", action="store_true") - mg.add_argument( - "--password", - dest="user_password", - default=None, - help="Alternative way to provide the new user's password. 
Please " - "consider to use the more secure, interactive way (-a option).") - subparser.add_argument("-c", "--activate-user", - help="Activate the user after creation.", - action="store_true") - subparser.add_argument( - metavar='USERNAME', - dest="user_name", - help="A user name which is unique in the internal user database.") - subparser.add_argument( - metavar="EMAIL", - nargs='?', - dest="user_email", - help="The email address of the new user.") - - subparser = subparsers.add_parser( - "activate_user", help="(Re-)activate an inactive (but existing) user.") - subparser.set_defaults(call=do_activate_user) - subparser.add_argument(metavar='USERNAME', dest="user_name", - help="The name of the user who is to be activated.") - - subparser = subparsers.add_parser( - "deactivate_user", help="Deactivate an active user.") - subparser.set_defaults(call=do_deactivate_user) - subparser.add_argument( - metavar='USERNAME', - dest="user_name", - help="The name of the user who is to be deactivated.") - - subparser = subparsers.add_parser( - "set_user_password", - help="Set a new password for a user. " - "By default, you will be prompted for the password.") - subparser.set_defaults(call=do_set_user_password) - subparser.add_argument( - metavar='USERNAME', - dest="user_name", - help="The name of the user who's password is to be set.") - subparser.add_argument( - metavar='PASSWORD', - nargs="?", - dest="user_password", - default=None, - help="Alternative way to provide the user's new password. 
" - "The more secure (and default way) is to provide it interactively.") - - subparser = subparsers.add_parser( - "set_user_entity", - help="Associate a user with an existing entity (which should represent a person, a program, an organization or something similar).") - subparser.set_defaults(call=do_set_user_entity) - subparser.add_argument( - metavar='USERNAME', - dest="user_name", - help="The name of the user who's associated entity you want to set.") - subparser.add_argument(metavar='ENTITY', dest="user_entity", - help="An ID of an existing entity.") - - subparser = subparsers.add_parser( - "reset_user_entity", - help="Terminate the association of a user with an entity.") - subparser.set_defaults(call=do_reset_user_entity) - subparser.add_argument( - metavar='USERNAME', - dest="user_name", - help="The name of the user who's associated entity you want to reset.") - - subparser = subparsers.add_parser( - "set_user_email", help="Set a new email for a user.") - subparser.set_defaults(call=do_set_user_email) - subparser.add_argument( - metavar='USERNAME', - dest="user_name", - help="The name of the user who's email is to be set.") - subparser.add_argument( - metavar='EMAIL', - dest="user_email", - help="The name of the user who's email is to be set.") - - subparser = subparsers.add_parser( - "retrieve_user", help="Retrieve a user (email, entity)") - subparser.set_defaults(call=do_retrieve_user) - subparser.add_argument( - metavar='USERNAME', dest="user_name", help="The name of the user.") - - subparser = subparsers.add_parser( - "delete_user", - help="Delete a user from caosdb's internal user database.") - subparser.set_defaults(call=do_delete_user) - subparser.add_argument(metavar='USERNAME', dest="user_name", - help="The name of the user who is to be deleted.") - - # user roles - subparser = subparsers.add_parser( - "add_user_roles", help="Extend the roles of a user.") - subparser.set_defaults(call=do_add_user_roles) - subparser.add_argument( - metavar='USERNAME', 
- dest="user_name", - help="The name of the user who's roles are to be extended.") - subparser.add_argument( - metavar='ROLES', - dest="user_roles", - nargs='+', - help="A space separated list of (existing) roles.") - - subparser = subparsers.add_parser( - "remove_user_roles", help="Remove some of the roles of a user.") - subparser.set_defaults(call=do_remove_user_roles) - subparser.add_argument( - metavar='USERNAME', - dest="user_name", - help="The name of the user from whom you want to take some roles away.") - subparser.add_argument( - metavar='ROLES', - dest="user_roles", - nargs='+', - help="A space separated list of (existing) roles.") - - subparser = subparsers.add_parser( - "retrieve_user_roles", help="Retrieve a user's roles.") - subparser.set_defaults(call=do_retrieve_user_roles) - subparser.add_argument( - metavar='USERNAME', dest="user_name", help="The name of the user.") - - # role permissions - subparser = subparsers.add_parser( - "retrieve_role_permissions", - help="Retrieve the set of permission rules of a role.") - subparser.set_defaults(call=do_retrieve_role_permissions) - subparser.add_argument( - metavar='ROLE', - dest="role_name", - help="The name of the role which permissions are to be retrieved.") - - subparser = subparsers.add_parser( - "grant_role_permissions", help="Grant permissions to a role.") - subparser.set_defaults(call=do_grant_role_permissions) - subparser.add_argument( - '--priority', - dest="permissions_priority", - action="store_true", - default=False, - help="This flag enables priority permission rules.") - subparser.add_argument( - metavar='ROLE', - dest="role_name", - help="The name of the role to which the permissions are to be granted.") - subparser.add_argument( - metavar='PERMISSIONS', - dest="role_permissions", - nargs='+', - help="A space separated list of permissions.") - - subparser = subparsers.add_parser( - "revoke_role_permissions", - help="Remove previously granted or denied permissions from a role.") - 
subparser.set_defaults(call=do_revoke_role_permissions) - subparser.add_argument( - '--priority', - dest="permissions_priority", - action="store_true", - default=False, - help="This flag is needed to revoke priority permissions.") - subparser.add_argument( - metavar='ROLE', - dest="role_name", - help="The name of the role from which you want to revoke permissions.") - subparser.add_argument( - metavar='PERMISSIONS', - dest="role_permissions", - nargs='+', - help="A space separated list of permissions.") - - subparser = subparsers.add_parser( - "deny_role_permissions", help="Deny a role permissions.") - subparser.set_defaults(call=do_deny_role_permissions) - subparser.add_argument( - '--priority', - dest="permissions_priority", - action="store_true", - default=False, - help="This flag enables priority permission rules.") - subparser.add_argument( - metavar='ROLE', - dest="role_name", - help="The name of the role which you want to deny permissions.") - subparser.add_argument( - metavar='PERMISSIONS', - dest="role_permissions", - nargs='+', - help="A space separated list of permissions.") - - # entities (CRUD) - subparser = subparsers.add_parser("insert", help="Insert entities.") - subparser.set_defaults(call=do_insert) - subparser.add_argument( - '-f', - '--flags', - dest="flags", - help="A python dictionary (dict) with flag keys and their values.", - metavar="FLAGS", - default="{}") - subparser.add_argument(metavar='PATH', dest="xml_path", - help="Path to an xml file.") - - subparser = subparsers.add_parser("retrieve", help="Retrieve entities.") - subparser.set_defaults(call=do_retrieve) - subparser.add_argument( - '-f', - '--flags', - dest="flags", - help="A python dictionary (dict) with flag keys and their values.", - metavar="FLAGS", - default="{}") - subparser.add_argument('-q', '--query', dest='query', action="store_true", - help="If the ENTITIES argument is a query.") - subparser.add_argument(metavar='ENTITIES', dest="entities", nargs='+', - help="A space 
separated list of ids or names of" - "entities or ai single query.") - - subparser = subparsers.add_parser("update", help="Update entities.") - subparser.set_defaults(call=do_update) - subparser.add_argument( - '-f', - '--flags', - dest="flags", - help="A python dictionary (dict) with flag keys and their values.", - metavar="FLAGS", - default="{}") - subparser.add_argument(metavar='PATH', dest="xml_path", - help="Path to an xml file.") - - subparser = subparsers.add_parser("delete", help="Delete entities.") - subparser.set_defaults(call=do_delete) - subparser.add_argument( - '-f', - '--flags', - dest="flags", - help="A python dictionary (dict) with flag keys and their values.", - metavar="FLAGS", - default="{}") - subparser.add_argument( - metavar='ENTITIES', - dest="entities", - nargs='+', - help="A space separated list of ids or names of entities.") - - # roles (CRUD) - create_role_parser = subparsers.add_parser( - "create_role", help="Create a new role.") - create_role_parser.set_defaults(call=do_create_role) - create_role_parser.add_argument( - dest="role_name", metavar="ROLENAME", help="The name of the new role.") - create_role_parser.add_argument( - dest="role_description", - metavar="DESCRIPTION", - help="A description of the role's purpose, it's intended use case, characteristics of the users who have this role, etc.") - - retrieve_role_parser = subparsers.add_parser( - "retrieve_role", help="Retrieve the description of an existing role.") - retrieve_role_parser.set_defaults(call=do_retrieve_role) - retrieve_role_parser.add_argument( - dest="role_name", - metavar="ROLENAME", - help="The name of the existing role.") - - update_role_parser = subparsers.add_parser( - "update_role", help="Change the description of an existing role.") - update_role_parser.set_defaults(call=do_update_role) - update_role_parser.add_argument( - dest="role_name", - metavar="ROLENAME", - help="The name of the existing role.") - update_role_parser.add_argument( - 
dest="role_description", - metavar="DESCRIPTION", - help="A new description of the role's purpose, it's intended use case, characteristics of the users who have this role, etc.") - - delete_role_parser = subparsers.add_parser( - "delete_role", help="Delete a role.") - delete_role_parser.set_defaults(call=do_delete_role) - delete_role_parser.add_argument( - dest="role_name", - metavar="ROLENAME", - help="The name of the existing role.") - - # entity acl - retrieve_entity_acl_parser = subparsers.add_parser( - "retrieve_entity_acl", help="Retrieve an entity ACL.") - retrieve_entity_acl_parser.set_defaults(call=do_retrieve_entity_acl) - retrieve_entity_acl_parser.add_argument(dest="query", metavar="QUERY", - help="A FIND query.") - - for action in ["grant", "deny", "revoke_denial", "revoke_grant"]: - action_entity_permissions_parser = subparsers.add_parser( - f"{action}_entity_permissions", - help=f"{action} entity permissions to one or more Entities.") - action_entity_permissions_parser.set_defaults( - call=do_action_entity_permissions, action=action) - action_entity_permissions_parser.add_argument(dest="query", metavar="QUERY", - help="A FIND query.") - action_entity_permissions_parser.add_argument(dest="role", metavar="ROLE", - help="The name of an exising role.") - action_entity_permissions_parser.add_argument( - dest="permissions", - metavar="PERMISSION", - help="A list of permissions", - nargs='+') - action_entity_permissions_parser.add_argument( - '--priority', - dest="priority", - action="store_true", - default=False, - help="This flag enables priority permission rules.") - - # Process arguments - args = parser.parse_args() - auth_token = args.auth_token - if auth_token is not None: - db.configure_connection(password_method="auth_token", - auth_token=auth_token) - else: - db.configure_connection() - - return args.call(args) - - -if __name__ == "__main__": - sys.exit(main()) +warn(("CaosDB was renamed to LinkAhead. 
Please import this library as `import linkahead.utils.caosdb_admin`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/utils/checkFileSystemConsistency.py b/src/caosdb/utils/checkFileSystemConsistency.py old mode 100755 new mode 100644 index 6c053fdca6acb3a6585589c0e6298ba0704ea590..279db9895f16153a515872c8c95c66dc90dbdf99 --- a/src/caosdb/utils/checkFileSystemConsistency.py +++ b/src/caosdb/utils/checkFileSystemConsistency.py @@ -1,128 +1,6 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. 
-# -# ** end header -# -"""requests the server to execute checkFileSystemConsistency job.""" +from linkahead.utils.checkFileSystemConsistency import * +from warnings import warn -import sys -import caosdb as db - -from argparse import ArgumentParser -from argparse import RawDescriptionHelpFormatter - -__all__ = [] -__version__ = 0.1 -__date__ = '2016-08-31' -__updated__ = '2016-09-01' - - -class CLIError(Exception): - """Generic exception to raise and log different fatal errors.""" - - def __init__(self, msg): - super(CLIError).__init__(type(self)) - self.msg = "E: %s" % msg - - def __str__(self): - return self.msg - - def __unicode__(self): - return self.msg - - -def runCheck(timeout, location): - """ Request the caosdb server to check the file system for consistency. - - location == None means that the whole file system is being checked. - Otherwise only a the directory tree under location is being checked. - """ - - if (timeout is not None): - db.get_config().set("Connection", "timeout", str(100 + int(timeout))) - files = db.Container().retrieve( - unique=False, raise_exception_on_error=False, flags={ - "fileStorageConsistency": ( - "-t " + str(timeout) if timeout else "") + ( - location if location else ""), }) - return files - - -def main(argv=None): - """Command line options.""" - - if argv is None: - argv = sys.argv - else: - sys.argv.extend(argv) - - # program_name = os.path.basename(sys.argv[0]) - program_version = "v%s" % __version__ - program_build_date = str(__updated__) - program_version_message = '%%(prog)s %s (%s)' % ( - program_version, program_build_date) - program_license = ''' - - Copyright 2016 BMPG. All rights reserved. - - Distributed on an "AS IS" basis without warranties - or conditions of any kind, either express or implied. 
- -USAGE -''' - - # Setup argument parser - parser = ArgumentParser(description=program_license, - formatter_class=RawDescriptionHelpFormatter) - parser.add_argument( - "-v", - "--verbose", - dest="verbose", - action="count", - help="set verbosity level [default: %(default)s]", - default=0) - parser.add_argument('-V', '--version', action='version', - version=program_version_message) - parser.add_argument( - '-t', - '--timeout', - dest="timeout", - help="timeout in seconds for the database requests. [default: %(default)s]", - metavar="TIMEOUT", - default="200") - parser.add_argument('location') - - # Process arguments - args = parser.parse_args() - global VERBOSITY - - VERBOSITY = args.verbose - TIMEOUT = args.timeout - - print(runCheck(TIMEOUT, args.location).messages) - - return 0 - - -if __name__ == "__main__": - sys.exit(main()) +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.utils.checkFileSystemConsistency`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/utils/create_revision.py b/src/caosdb/utils/create_revision.py index 419e1c9f2b97171be0dccf1bc772ae5db679c0b7..6eb2ab9c792db0040f92073b937c7798a3fa3d46 100644 --- a/src/caosdb/utils/create_revision.py +++ b/src/caosdb/utils/create_revision.py @@ -1,95 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""provides utilities for creating revisions of database entries.""" -import caosdb as db -from caosdb import INTEGER, LIST +from linkahead.utils.create_revision import * +from warnings import warn - -def bend_references(from_id, to_id, except_for=None): - """redirects all references to a new target. - - All entities having a reference pointing to from_id are found - and those references are changed to point to to_id. - entities having an id listed in except_for are excluded. - -Parameters ----------- - -from_id : int - the old object to which references where pointing -to_id : int - the new object to which references will be pointing -except_for : list of int - entities with id of this list will not be changed - """ - if except_for is None: - except_for = [to_id] - entities = db.execute_query( - "FIND ENTITY WHICH references {}".format(from_id)) - for ent in entities: - if ent.id in except_for: - continue - for prop in ent.properties: - if isinstance( - prop.value, int) and ( - prop.datatype != INTEGER) and ( - prop.value == from_id): - prop.value = to_id - if (isinstance(prop.value, list) - and len(prop.value) > 0 - and isinstance(prop.value[0], int) - and (prop.datatype != LIST(INTEGER)) - and from_id in prop.value): - index = prop.value.index(from_id) - prop.value[index] = to_id - ent.update() - - -def create_revision(old_id, prop, value): - """creates a revision of an existing record. - - This function changes the record with id old_id. The value of the - propertye prop is changed to value. 
- -Parameters ----------- - -old_id : int - id of the record to be changed -prop : string - name of the property to be changed -value : type of corresponding property - the new value of the corresponding property -""" - record = db.execute_query("FIND {}".format(old_id))[0] - new_rec = record.copy() - new_rec.get_property(prop).value = value - try: - new_rec.remove_property("revisionOf") - except BaseException: - pass - new_rec.add_property(name="revisionOf", value=record.id) - new_rec.insert() - bend_references(record.id, new_rec.id) +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.utils.create_revision`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/utils/get_entity.py b/src/caosdb/utils/get_entity.py index a27aafa99ffe3759a46876a5bcd5e686d631b1dc..9e41090e6e91c371c740487bc3d482a738ff2127 100644 --- a/src/caosdb/utils/get_entity.py +++ b/src/caosdb/utils/get_entity.py @@ -1,49 +1,6 @@ -# -*- coding: utf-8 -*- -# -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2023 Henrik tom Wörden <h.tomwoerden@indiscale.com> -# Copyright (C) 2023 IndiScale GmbH <info@indiscale.com> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. 
-# -"""Convenience functions to retrieve a specific entity.""" +from linkahead.utils.get_entity import * +from warnings import warn -from typing import Union -from ..common.models import execute_query, Entity - - -def get_entity_by_name(name: str) -> Entity: - """Return the result of a unique query that uses the name to find the correct entity. - - Submits the query "FIND ENTITY WITH name='{name}'". - """ - return execute_query(f"FIND ENTITY WITH name='{name}'", unique=True) - - -def get_entity_by_id(eid: Union[str, int]) -> Entity: - """Return the result of a unique query that uses the id to find the correct entity. - - Submits the query "FIND ENTITY WITH id='{eid}'". - """ - return execute_query(f"FIND ENTITY WITH id='{eid}'", unique=True) - - -def get_entity_by_path(path: str) -> Entity: - """Return the result of a unique query that uses the path to find the correct file. - - Submits the query "FIND FILE WHICH IS STORED AT '{path}'". - """ - return execute_query(f"FIND FILE WHICH IS STORED AT '{path}'", unique=True) +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.utils.get_entity`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/utils/git_utils.py b/src/caosdb/utils/git_utils.py index 7a58272a3bef1930f75a1e08364349388e2bb89f..4f9d24382cdc4dd8e96b8be934c461195cf3aee9 100644 --- a/src/caosdb/utils/git_utils.py +++ b/src/caosdb/utils/git_utils.py @@ -1,82 +1,6 @@ -# -*- coding: utf-8 -*- -# -# This file is a part of the CaosDB Project. 
-# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> -# Copyright (C) 2020-2022 IndiScale GmbH <info@indiscale.com> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""git-utils: Some functions for retrieving information about git repositories. 
-""" +from linkahead.utils.git_utils import * +from warnings import warn -import logging -import tempfile - -from subprocess import call - -logger = logging.getLogger(__name__) - - -def get_origin_url_in(folder: str): - """return the Fetch URL of the git repository in the given folder.""" - with tempfile.NamedTemporaryFile(delete=False, mode="w") as t: - call(["git", "remote", "show", "origin"], stdout=t, cwd=folder) - with open(t.name, "r") as t: - urlString = "Fetch URL:" - - for line in t.readlines(): - if urlString in line: - return line[line.find(urlString) + len(urlString):].strip() - - return None - - -def get_diff_in(folder: str, save_dir=None): - """returns the name of a file where the out put of "git diff" in the given - folder is stored.""" - with tempfile.NamedTemporaryFile(delete=False, mode="w", dir=save_dir) as t: - call(["git", "diff"], stdout=t, cwd=folder) - - return t.name - - -def get_branch_in(folder: str): - """returns the current branch of the git repository in the given folder. - - The command "git branch" is called in the given folder and the - output is returned - """ - with tempfile.NamedTemporaryFile(delete=False, mode="w") as t: - call(["git", "rev-parse", "--abbrev-ref", "HEAD"], stdout=t, cwd=folder) - with open(t.name, "r") as t: - return t.readline().strip() - - -def get_commit_in(folder: str): - """returns the commit hash in of the git repository in the given folder. - - The command "git log -1 --format=%h" is called in the given folder - and the output is returned - """ - - with tempfile.NamedTemporaryFile(delete=False, mode="w") as t: - call(["git", "log", "-1", "--format=%h"], stdout=t, cwd=folder) - with open(t.name, "r") as t: - return t.readline().strip() +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.utils.git_utils`. 
Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/utils/linkahead_admin.py b/src/caosdb/utils/linkahead_admin.py new file mode 100644 index 0000000000000000000000000000000000000000..69d2846219c57478d1037fe22e5276b6a803ac7c --- /dev/null +++ b/src/caosdb/utils/linkahead_admin.py @@ -0,0 +1,6 @@ + +from linkahead.utils.linkahead_admin import * +from warnings import warn + +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.utils.linkahead_admin`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/utils/plantuml.py b/src/caosdb/utils/plantuml.py index 6252a48983c62e7a2f33113422205209d616b5b6..07a401604f2ae570e2e6506ebaadbbe85d8228f7 100644 --- a/src/caosdb/utils/plantuml.py +++ b/src/caosdb/utils/plantuml.py @@ -1,415 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""Utilities for work with PlantUML. 
+from linkahead.utils.plantuml import * +from warnings import warn -PlantUML (http://plantuml.com) is a converter from a simple -descriptive language to graphviz diagrams. - -To convert the output, you can write it into FILENAME.pu and then -convert it with: - -plantuml FILENAME.pu -> FILENAME.png -""" - -import os -import shutil - -import caosdb as db -from caosdb.common.datatype import is_reference, get_referenced_recordtype - -from typing import List, Optional - -import tempfile - -REFERENCE = "REFERENCE" - - -def get_description(description_str): - """Extract and format a description string from a record type or property. - - Parameters - ---------- - description_str : str - The description string that is going to be formatted. - - Returns - ------- - str - The reformatted description ending in a line break. - """ - words = description_str.split() - lines = [] - lines.append("") - - for w in words: - if len(lines[-1] + w) > 60: - lines.append("") - - if len(lines[-1]) > 0: - lines[-1] += " " - lines[-1] += w - description = "\n".join(lines) - - return description + "\n" - - -class Grouped(object): - def __init__(self, name, parents): - self.name = name - self.parents = parents - - def get_parents(self): - return self.parents - - -def recordtypes_to_plantuml_string(iterable, - add_properties: bool = True, - add_recordtypes: bool = True, - add_legend: bool = True, - no_shadow: bool = False, - style: str = "default"): - """Converts RecordTypes into a string for PlantUML. - - This function obtains an iterable and returns a string which can - be input into PlantUML for a representation of all RecordTypes in - the iterable. - - Current options for style - ------------------------- - - "default" - Standard rectangles with uml class circle and methods section - "salexan" - Round rectangles, hide circle and methods section - - Current limitations - ------------------- - - - It is inherently hard to detect if an element should be rendered - as a class/RecordType or not. 
Currently it is rendered if - either the "type" attribute is None or - type(element) == RecordType. - - Inheritance of Properties is not rendered nicely at the moment. - - Parameters - ---------- - iterable: iterable of caosdb.Entity - The objects to be rendered with plantuml. - - no_shadow : bool, optional - If true, tell plantuml to use a skin without blurred shadows. - - - Returns - ------- - out : str - The plantuml string for the given container. - """ - - # TODO: This function needs a review of python type hints. - - classes = [el for el in iterable - if isinstance(el, db.RecordType)] - dependencies = {} - inheritances = {} - properties = [p for p in iterable if isinstance(p, db.Property)] - grouped = [g for g in iterable if isinstance(g, Grouped)] - - def _add_properties(c, importance=None): - result = "" - - for p in c.get_properties(): - if importance is None or c.get_properties().get_importance(p) == importance: - if importance is not None and len(result) == 0: - result += ".." + importance.lower() + "..\n" - name = p.name - p_type = p.datatype - - if p_type is None: - # get type from properties - - for p2 in properties: - if p2.name == p.name: - p_type = p2.datatype - - if p_type is None: - # is reference? 
- - for p2 in classes: - if p2.name == p.name: - p_type = p2 - - if isinstance(p_type, db.Entity): - p_type = p_type.name - dependencies[c].append(p_type) - elif p_type is not None: - for c2 in classes: - if c2.name == p_type or db.LIST(c2.name) == p_type: - dependencies[c].append(c2.name) - result += ' {name} ({type})\n'.format( - name=name, type=p_type) - - return result - - result = "@startuml\n\n" - - if no_shadow: - result += "skinparam shadowing false\n" - - if style == "default": - result += "skinparam classAttributeIconSize 0\n" - elif style == "salexan": - result += """skinparam roundcorner 20\n -skinparam boxpadding 20\n -\n -hide methods\n -hide circle\n -""" - else: - raise ValueError("Unknown style.") - - if add_properties: - result += "package Properties #DDDDDD {\n" - for p in properties: - inheritances[p] = p.get_parents() - dependencies[p] = [] - - result += "class \"{klass}\" << (P,#008800) >> {{\n".format(klass=p.name) - - if p.description is not None: - result += get_description(p.description) - result += "\n..\n" - - if isinstance(p.datatype, str): - result += "datatype: " + p.datatype + "\n" - elif isinstance(p.datatype, db.Entity): - result += "datatype: " + p.datatype.name + "\n" - else: - result += "datatype: " + str(p.datatype) + "\n" - result += "}\n\n" - result += "}\n\n" - - if add_recordtypes: - result += "package RecordTypes #DDDDDD {\n" - - for c in classes: - inheritances[c] = c.get_parents() - dependencies[c] = [] - result += "class \"{klass}\" << (C,#FF1111) >> {{\n".format(klass=c.name) - - if c.description is not None: - result += get_description(c.description) - - props = "" - props += _add_properties(c, importance=db.FIX) - props += _add_properties(c, importance=db.OBLIGATORY) - props += _add_properties(c, importance=db.RECOMMENDED) - props += _add_properties(c, importance=db.SUGGESTED) - - if len(props) > 0: - result += "__Properties__\n" + props - else: - result += "\n..\n" - result += "}\n\n" - - for g in grouped: - 
inheritances[g] = g.get_parents() - result += "class \"{klass}\" << (G,#0000FF) >> {{\n".format(klass=g.name) - result += "}\n\n" - - for c, parents in inheritances.items(): - for par in parents: - result += "\"{par}\" <|-- \"{klass}\"\n".format( - klass=c.name, par=par.name) - - for c, deps in dependencies.items(): - for dep in deps: - result += "\"{klass}\" *-- \"{dep}\"\n".format( - klass=c.name, dep=dep) - - if add_legend: - result += """ - -package \"B is a subtype of A\" <<Rectangle>> { - A <|-right- B - note "This determines what you find when you query for the RecordType.\\n'FIND RECORD A' will provide Records which have a parent\\nA or B, while 'FIND RECORD B' will provide only Records which have a parent B." as N1 -} -""" - result += """ - -package \"The property P references an instance of D\" <<Rectangle>> { - class C { - P(D) - } - C *-right- D - note "Employ this when searching for C: 'FIND RECORD C WITH D'\\nOr if the value of D is a Record: 'FIND RECORD C WHICH REFERENCES D' is possible.\\nEmploying this while searching for D: 'FIND RECORD D WHICH IS REFERENCED BY C" as N2 -} - -""" - - result += "\n@enduml\n" - - return result - - -def retrieve_substructure(start_record_types, depth, result_id_set=None, result_container=None, cleanup=True): - """Recursively retrieves CaosDB record types and properties, starting - from given initial types up to a specific depth. - - Parameters - ---------- - start_record_types : Iterable[db.Entity] - Iterable with the entities to be displayed. Starting from these - entities more entities will be retrieved. - depth : int - The maximum depth up to which to retriev sub entities. - result_id_set : set[int] - Used by recursion. Filled with already visited ids. - result_container : db.Container - Used by recursion. Filled with already visited entities. - cleanup : bool - Used by recursion. If True return the resulting result_container. - Don't return anything otherwise. 
- - Returns - ------- - db.Container - A container containing all the retrieved entites - or None if cleanup is False. - """ - # Initialize the id set and result container for level zero recursion depth: - if result_id_set is None: - result_id_set = set() - if result_container is None: - result_container = db.Container() - - for entity in start_record_types: - entity.retrieve() - if entity.id not in result_id_set: - result_container.append(entity) - result_id_set.add(entity.id) - for prop in entity.properties: - if (is_reference(prop.datatype) and prop.datatype != db.FILE and depth > 0): - rt = db.RecordType( - name=get_referenced_recordtype(prop.datatype)).retrieve() - retrieve_substructure([rt], depth-1, result_id_set, - result_container, False) - # TODO: clean up this hack - # TODO: make it also work for files - if is_reference(prop.datatype) and prop.value is not None: - r = db.Record(id=prop.value).retrieve() - retrieve_substructure([r], depth-1, result_id_set, result_container, False) - if r.id not in result_id_set: - result_container.append(r) - result_id_set.add(r.id) - - if prop.id not in result_id_set: - result_container.append(prop) - result_id_set.add(prop.id) - - for parent in entity.parents: - rt = db.RecordType(id=parent.id).retrieve() - if parent.id not in result_id_set: - result_container.append(rt) - result_id_set.add(parent.id) - if depth > 0: - retrieve_substructure([rt], depth-1, result_id_set, - result_container, False) - - if cleanup: - return result_container - return None - - -def to_graphics(recordtypes: List[db.Entity], filename: str, - output_dirname: Optional[str] = None, - formats: List[str] = ["tsvg"], - silent: bool = True, - add_properties: bool = True, - add_recordtypes: bool = True, - add_legend: bool = True, - no_shadow: bool = False, - style: str = "default"): - """Calls recordtypes_to_plantuml_string(), saves result to file and - creates an svg image - - plantuml needs to be installed. 
- - Parameters - ---------- - recordtypes : Iterable[db.Entity] - Iterable with the entities to be displayed. - filename : str - filename of the image without the extension(e.g. data_structure; - also without the preceeding path. - data_structure.pu and data_structure.svg will be created.) - output_dirname : str - the destination directory for the resulting images as defined by the "-o" - option by plantuml - default is to use current working dir - formats : List[str] - list of target formats as defined by the -t"..." options by plantuml, e.g. "tsvg" - silent : bool - Don't output messages. - no_shadow : bool, optional - If true, tell plantuml to use a skin without blurred shadows. - """ - pu = recordtypes_to_plantuml_string(iterable=recordtypes, - add_properties=add_properties, - add_recordtypes=add_recordtypes, - add_legend=add_legend, - no_shadow=no_shadow, - style=style) - - if output_dirname is None: - output_dirname = os.getcwd() - - allowed_formats = [ - "tpng", "tsvg", "teps", "tpdf", "tvdx", "txmi", - "tscxml", "thtml", "ttxt", "tutxt", "tlatex", "tlatex:nopreamble"] - - with tempfile.TemporaryDirectory() as td: - - pu_filename = os.path.join(td, filename + ".pu") - with open(pu_filename, "w") as pu_file: - pu_file.write(pu) - - for format in formats: - extension = format[1:] - if ":" in extension: - extension = extension[:extension.index(":")] - - if format not in allowed_formats: - raise RuntimeError("Format not allowed.") - cmd = "plantuml -{} {}".format(format, pu_filename) - if not silent: - print("Executing:", cmd) - - if os.system(cmd) != 0: # TODO: replace with subprocess.run - raise Exception("An error occured during the execution of " - "plantuml when using the format {}. " - "Is plantuml installed? " - "You might want to dry a different format.".format(format)) - # copy only the final product into the target directory - shutil.copy(os.path.join(td, filename + "." + extension), - output_dirname) +warn(("CaosDB was renamed to LinkAhead. 
Please import this library as `import linkahead.utils.plantuml`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/utils/register_tests.py b/src/caosdb/utils/register_tests.py index 9d0afcbb0845e1d8d31622e8ab9926f26f7e78f6..7b8622891f79fb8e020a85f23dbbb42d7f66cc91 100644 --- a/src/caosdb/utils/register_tests.py +++ b/src/caosdb/utils/register_tests.py @@ -1,136 +1,6 @@ -#!/usr/bin/env python -# encoding: utf-8 -# -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2022 Alexander Schlemmer <alexander.schlemmer@ds.mpg.de> -# Copyright (C) 2022 Timm Fitschen <t.fitschen@indiscale.com> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -import caosdb as db -from caosdb import administration as admin +from linkahead.utils.register_tests import * +from warnings import warn -""" -This module implements a registration procedure for integration tests which -need a running CaosDB instance. - -It ensures that tests do not accidentally overwrite data in real CaosDB -instances, as it checks whether the running CaosDB instance is actually the -correct one, that -should be used for these tests. 
- -The test files have to define a global variable TEST_KEY which must be unique -for each test using - -set_test_key("ABCDE") - -The test procedure (invoked by pytest) checks whether a registration -information is stored in one of the server properties or otherwise -- offers to register this test in the currently running database ONLY if this - is empty. -- fails otherwise with a RuntimeError - -NOTE: you probably need to use pytest with the -s option to be able to - register the test interactively. Otherwise, the server property has to be - set before server start-up in the server.conf of the CaosDB server. - -This module is intended to be used with pytest. - -There is a pytest fixture "clear_database" that performs the above mentioned -checks and clears the database in case of success. -""" - -TEST_KEY = None - - -def set_test_key(KEY): - global TEST_KEY - TEST_KEY = KEY - - -def _register_test(): - res = db.execute_query("COUNT Entity") - if not isinstance(res, int): - raise RuntimeError("Response from server for Info could not be interpreted.") - if res > 0: - raise RuntimeError("This instance of CaosDB contains entities already." - "It must be empty in order to register a new test.") - - print("Current host of CaosDB instance is: {}".format( - db.connection.connection.get_connection()._delegate_connection.setup_fields["host"])) - answer = input("This method will register your current test with key {} with the currently" - " running instance of CaosDB. 
Do you want to continue (y/N)?".format( - TEST_KEY)) - if answer != "y": - raise RuntimeError("Test registration aborted by user.") - - admin.set_server_property("_CAOSDB_INTEGRATION_TEST_SUITE_KEY", - TEST_KEY) - - -def _get_registered_test_key(): - try: - return admin.get_server_property("_CAOSDB_INTEGRATION_TEST_SUITE_KEY") - except KeyError: - return None - - -def _is_registered(): - registered_test_key = _get_registered_test_key() - if not registered_test_key: - return False - elif registered_test_key == TEST_KEY: - return True - else: - raise RuntimeError("The database has been setup for a different test.") - - -def _assure_test_is_registered(): - global TEST_KEY - if TEST_KEY is None: - raise RuntimeError("TEST_KEY is not defined.") - if not _is_registered(): - answer = input("Do you want to register this instance of CaosDB" - " with the current test? Do you want to continue (y/N)?") - if answer == "y": - _register_test() - raise RuntimeError("Test has been registered. Please rerun tests.") - else: - raise RuntimeError("The database has not been setup for this test.") - - -def _clear_database(): - c = db.execute_query("FIND ENTITY WITH ID>99") - c.delete(raise_exception_on_error=False) - return None - - -try: - import pytest - - @pytest.fixture - def clear_database(): - """Remove Records, RecordTypes, Properties, and Files ONLY IF the CaosDB - server the current connection points to was registered with the appropriate key. - - PyTestInfo Records and the corresponding RecordType and Property are preserved. - """ - _assure_test_is_registered() - yield _clear_database() # called before the test function - _clear_database() # called after the test function -except ImportError: - raise Warning("""The register_tests module depends on pytest and is - intended to be used in integration test suites for the - caosdb-pylib library only.""") +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.utils.register_tests`. 
Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/utils/server_side_scripting.py b/src/caosdb/utils/server_side_scripting.py index 7e5ee4390ae3314792d12fd2942980aa3d9c9773..ccc8980ae11aae5f51a9555b81d4cba2f567664b 100644 --- a/src/caosdb/utils/server_side_scripting.py +++ b/src/caosdb/utils/server_side_scripting.py @@ -1,148 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. -# -# Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> -# Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""server_side_scripting -Helper functions for calling server-side scripts. -""" -from urllib.parse import quote -from lxml import etree +from linkahead.utils.server_side_scripting import * +from warnings import warn -from caosdb.connection.connection import get_connection -from caosdb.connection.utils import urlencode -from caosdb.connection.encode import (MultipartParam, multipart_encode, - ReadableMultiparts) - - -def _make_params(pos_args, opts): - """Create and return option string components. - -The return value is a dict with be something like `-O<key>`:`<value>` from `opts` and -`-p{0,1,2,3,...}`:`<value>` from `pos_args`. 
- - """ - result = {} - for key, val in opts.items(): - result["-O{key}".format(key=key)] = str(val) - for i, val in enumerate(pos_args): - result["-p{i}".format(i=i)] = str(val) - return result - - -def _make_multipart_request(call, pos_args, opts, files): - """Return body and header for an HTTP request. - """ - parts = list() - params = _make_params(pos_args, opts) - - parts.append(MultipartParam("call", call)) - for key, val in params.items(): - parts.append(MultipartParam(key, val)) - - for paramname, filename in files.items(): - parts.append(MultipartParam.from_file(paramname=paramname, - filename=filename)) - - body, headers = multipart_encode(parts) - body = ReadableMultiparts(body) - return body, headers - - -def _make_form_request(call, pos_args, opts): - """Return URL from call and argumewnts, and headers for urlencoding.""" - form = dict() - form["call"] = call - - params = _make_params(pos_args, opts) - for key, val in params.items(): - form[key] = val - - headers = {} - headers["Content-Type"] = "application/x-www-form-urlencoded" - return urlencode(form), headers - - -def _make_request(call, pos_args, opts, files=None): - """ - Multipart if with files, otherwise url-encoded. 
- - Return - ------ - path_segments, body, headers - """ - - if files is not None: - return _make_multipart_request(call, pos_args, opts, files) - - return _make_form_request(call, pos_args, opts) - - -def run_server_side_script(call, *args, files=None, **kwargs): - """ - - Return - ------ - response : ScriptingResponse - """ - body, headers = _make_request(call=call, pos_args=args, - opts=kwargs, files=files) - response = get_connection()._http_request(method="POST", - path=quote("scripting"), - body=body, - headers=headers) - xml = etree.parse(response) - code = int(xml.xpath("/Response/script/@code")[0]) - call = xml.xpath("/Response/script/call")[0].text - stdout = xml.xpath("/Response/script/stdout")[0].text - stderr = xml.xpath("/Response/script/stderr")[0].text - - return ScriptingResponse(call=call, - code=code, - stdout=stdout, - stderr=stderr) - - -class ScriptingResponse(): - """ScriptingResponse - - A data class for the response of server-side scripting calls. - - Properties - ---------- - code : int - The return code of the script process. - call : str - The complete call of the script minus the absolute path and the - auth_token. - stdout : str - The STDOUT of the script process. - stderr : str - The STDERR of the script process. - - """ - - def __init__(self, call, code, stdout, stderr): - self.call = call - self.code = code - self.stdout = stdout - self.stderr = stderr +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.utils.server_side_scripting`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/caosdb/yamlapi.py b/src/caosdb/yamlapi.py index 80bb4b13e4d1626c5d29c8950f3a22bbb73e0fdb..849c9febe723e8bdc61b962f006f72fedba76653 100644 --- a/src/caosdb/yamlapi.py +++ b/src/caosdb/yamlapi.py @@ -1,169 +1,6 @@ -# -*- coding: utf-8 -*- -# -# ** header v3.0 -# This file is a part of the CaosDB Project. 
-# -# Copyright (C) 2018 Research Group Biomedical Physics, -# Max-Planck-Institute for Dynamics and Self-Organization Göttingen -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -# -# ** end header -# -"""!!! Deprecated !!! YAML interface for the database (caosdb)""" +from linkahead.yamlapi import * +from warnings import warn -import yaml -from lxml import etree -from lxml.etree import Element -import re -import caosdb -import caosdb.common.utils as utils -from caosdb.connection.connection import get_connection -import warnings - - -def append_sublist(v, newel, def_entity_type): - warnings.warn(""" - This function is deprecated and will be removed with the next release. - Please use caosdb-advanced-user-tools/models/data_model.py for a - similar functionality.""", DeprecationWarning) - if v is None: - return - for i in v: - if isinstance(i, dict): - entity_type = def_entity_type - if "entity" in i: - entity_type = i["entity"] - del i["entity"] - newel.append(kv_to_xml(entity_type, i)) - - -def kv_to_xml(k, v): - warnings.warn(""" - This function is deprecated and will be removed with the next release. 
- Please use caosdb-advanced-user-tools/models/data_model.py for a - similar functionality.""", DeprecationWarning) - newel = Element(k) - # code.interact(local=locals()) - if isinstance(v, list): # Top level loop - append_sublist(v, newel, "Property") - elif isinstance(v, dict): - if "value" in v: - newel.text = v["value"] - del v["value"] - subdict = {"properties": "Property", "parents": "Parent"} - for jk, jv in subdict.items(): - if jk in v: - append_sublist(v[jk], newel, jv) - del v[jk] - for k2, v2 in v.items(): - newel.set(k2, str(v2)) - return newel - - -def dict_to_xml(d): - """ - d: The dictionary (possibly loaded from yaml) - to convert to caosdb-xml. - """ - warnings.warn(""" - This function is deprecated and will be removed with the next release. - Please use caosdb-advanced-user-tools/models/data_model.py for a - similar functionality.""", DeprecationWarning) - return kv_to_xml("Entities", d) - - -def yaml_to_xml(yamlstr): - warnings.warn(""" - This function is deprecated and will be removed with the next release. - Please use caosdb-advanced-user-tools/models/data_model.py for a - similar functionality.""", DeprecationWarning) - """Load a yaml document from yamlstr and converts it to XML. - - Parameters - ---------- - yamlstr : str - The string to load the yaml document from. - - """ - return dict_to_xml(yaml.load(yamlstr, Loader=yaml.SafeLoader)) - - -def process(text): - """Do some replacements on the original file to obtain valid yaml.""" - warnings.warn(""" - This function is deprecated and will be removed with the next release. 
- Please use caosdb-advanced-user-tools/models/data_model.py for a - similar functionality.""", DeprecationWarning) - processed = re.sub( - "^(\\s*)-\\s*\\{?(.*)\\}?\\s*$", - "\\1- {\\2}", - text, - flags=re.MULTILINE) - processed = re.sub("^(\\s*)\\+\\s*(.*)\\s*$", "\\1- \\2", - processed, flags=re.MULTILINE) - print(processed) - return processed - - -def yaml_file_to_xml(yamlfilename): - warnings.warn(""" - This function is deprecated and will be removed with the next release. - Please use caosdb-advanced-user-tools/models/data_model.py for a - similar functionality.""", DeprecationWarning) - with open(yamlfilename, "r") as f: - return yaml_to_xml(process(f.read())) - - -def insert_yaml_file(yamlfilename, simulate=False): - """Inserts the contents of 'yamlfilename' into the database. - - Set 'simulate' to True if you don't actually want to insert the xml, - but only receive what would be sent. - """ - warnings.warn(""" - This function is deprecated and will be removed with the next release. 
- Please use caosdb-advanced-user-tools/models/data_model.py for a - similar functionality.""", DeprecationWarning) - con = get_connection() - prs = etree.XMLParser(remove_blank_text=True) - sent_xml = etree.tostring( - etree.fromstring( - etree.tostring( - yaml_file_to_xml(yamlfilename)), - prs), - pretty_print=True) - if simulate: - return "", sent_xml.decode("utf-8") - response = con.insert(entity_uri_segment="Entity/", - body=sent_xml) - resp_text = response.readall() - resp_elem = etree.fromstring(resp_text, prs) - for i in resp_elem.iter("Error"): - print("ERROR: " + i.get("description")) - child = i.getparent() - while child is not None: - childname = "" - childid = "" - # print(etree.tostring(child)) - if child.get("name") is not None: - childname = child.get("name") - if child.get("id") is not None: - childid = child.get("id") - print(" in " + child.tag + " " + childname + " " + childid) - child = child.getparent() - return etree.tostring(resp_elem, - pretty_print=True).decode( - "utf-8"), sent_xml.decode("utf-8") +warn(("CaosDB was renamed to LinkAhead. Please import this library as `import linkahead.yamlapi`. Using the" + " old name, starting with caosdb, is deprecated."), DeprecationWarning) diff --git a/src/doc/Makefile b/src/doc/Makefile index 64219c5957ee963e84f9305685f2ec4e8ed3d761..f25b8e501d5f295af9b17700b76f58494b62b66f 100644 --- a/src/doc/Makefile +++ b/src/doc/Makefile @@ -1,5 +1,5 @@ # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> # Copyright (C) 2020 Daniel Hornung <d.hornung@indiscale.com> @@ -28,7 +28,7 @@ SPHINXOPTS ?= -a SPHINXBUILD ?= sphinx-build SPHINXAPIDOC ?= sphinx-apidoc -PY_BASEDIR = ../caosdb +PY_BASEDIR = ../linkahead SOURCEDIR = . 
BUILDDIR = ../../build/doc diff --git a/src/doc/administration.rst b/src/doc/administration.rst index eab02e43a833559dc21ea7a9fa5edfaf6431facf..cef9b69483339ebb803b4882fd73beb189c5c184 100644 --- a/src/doc/administration.rst +++ b/src/doc/administration.rst @@ -1,16 +1,16 @@ Administration ============== -The Python script ``caosdb_admin.py`` should be used for administrative tasks. -Call ``caosdb_admin.py --help`` to see how to use it. +The Python script ``linkahead_admin.py`` should be used for administrative tasks. +Call ``linkahead_admin.py --help`` to see how to use it. -The most common task is to create a new user (in the CaosDB realm) and set a +The most common task is to create a new user (in the LinkAhead realm) and set a password for the user (note that a user typically needs to be activated): .. code:: console - $ caosdb_admin.py create_user anna - $ caosdb_admin.py set_user_password anna - $ caosdb_admin.py add_user_roles anna administration - $ caosdb_admin.py activate_user anna + $ linkahead_admin.py create_user anna + $ linkahead_admin.py set_user_password anna + $ linkahead_admin.py add_user_roles anna administration + $ linkahead_admin.py activate_user anna diff --git a/src/doc/concepts.rst b/src/doc/concepts.rst index 29625a0a105dacdea2183eac743d1904a7743ec7..2438614f3b1fb8f1b392fef9a03f618dc1cf9750 100644 --- a/src/doc/concepts.rst +++ b/src/doc/concepts.rst @@ -1,6 +1,6 @@ -======================== -The concepts of PyCaosDB -======================== +=========================== +The concepts of PyLinkAhead +=========================== - `Configuration <configuration>` diff --git a/src/doc/conf.py b/src/doc/conf.py index 2dd4ab62fd5248ca185ea2e0472c493134149cca..1b70ef07f0bfa5e9c336222c74f097924c244767 100644 --- a/src/doc/conf.py +++ b/src/doc/conf.py @@ -24,7 +24,7 @@ import sphinx_rtd_theme # noqa: E402 # -- Project information ----------------------------------------------------- -project = 'pycaosdb' +project = 'pylinkahead' copyright = 
'2023, IndiScale GmbH' author = 'Daniel Hornung' @@ -115,7 +115,7 @@ html_static_path = ['_static'] # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. -htmlhelp_basename = 'caosdb-pylibdoc' +htmlhelp_basename = 'linkahead-pylibdoc' # -- Options for LaTeX output ------------------------------------------------ @@ -142,7 +142,7 @@ latex_elements = { # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'caosdb-pylib.tex', 'caosdb-pylib Documentation', + (master_doc, 'linkahead-pylib.tex', 'linkahead-pylib Documentation', 'IndiScale GmbH', 'manual'), ] @@ -152,7 +152,7 @@ latex_documents = [ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - (master_doc, 'pycaosdb', 'pycaosdb documentation', + (master_doc, 'pylinkahead', 'pylinkahead documentation', [author], 1) ] @@ -163,8 +163,8 @@ man_pages = [ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'pycaosdb', 'pycaosdb documentation', - author, 'pycaosdb', 'One line description of project.', + (master_doc, 'pylinkahead', 'pylinkahead documentation', + author, 'pylinkahead', 'One line description of project.', 'Miscellaneous'), ] @@ -202,9 +202,9 @@ epub_exclude_files = ['search.html'] # https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#confval-intersphinx_mapping intersphinx_mapping = { "python": ("https://docs.python.org/", None), - "caosdb-mysqlbackend": ("https://docs.indiscale.com/caosdb-mysqlbackend/", - None), - "caosdb-server": ("https://docs.indiscale.com/caosdb-server/", None), + "linkahead-mysqlbackend": ("https://docs.indiscale.com/caosdb-mysqlbackend/", + None), + "linkahead-server": ("https://docs.indiscale.com/caosdb-server/", None), } diff --git a/src/doc/configuration.md 
b/src/doc/configuration.md index 02cbbd7b13d916a676ad26c277e370ae76bf3725..54ae251b9db9ef000545e701406b979aa58043f8 100644 --- a/src/doc/configuration.md +++ b/src/doc/configuration.md @@ -1,16 +1,16 @@ -# Configuration of PyCaosDB # -The behavior of PyCaosDB is defined via a configuration that is provided using configuration files. -PyCaosDB tries to read from the inifile specified in the environment variable `PYCAOSDBINI` or -alternatively in `~/.pycaosdb.ini` upon import. After that, the ini file `pycaosdb.ini` in the +# Configuration of PyLinkAhead # +The behavior of PyLinkAhead is defined via a configuration that is provided using configuration files. +PyLinkAhead tries to read from the inifile specified in the environment variable `PYCAOSDBINI` or +alternatively in `~/.pylinkahead.ini` upon import. After that, the ini file `pylinkahead.ini` in the current working directory will be read additionally, if it exists. Here, we will look at the most common configuration options. For a full and comprehensive -description please check out the [example pycaosdb.ini file](https://gitlab.com/caosdb/caosdb-pylib/-/blob/main/examples/pycaosdb.ini). You can download this file and use +description please check out the [example pylinkahead.ini file](https://gitlab.com/linkahead/linkahead-pylib/-/blob/main/examples/pylinkahead.ini). You can download this file and use it as a starting point. Typically, you need to change at least the `url` and `username` fields as required. (Ask your -CaosDB administrator or IT crowd if you do not know what to put there, but for the demo instance at +LinkAhead administrator or IT crowd if you do not know what to put there, but for the demo instance at https://demo.indiscale.com, `username=admin` and `password=caosdb` should work). 
## Authentication ## @@ -46,7 +46,7 @@ username=YOUR_USERNAME ## SSL Certificate ## -In some cases (especially if you are testing CaosDB) you might need to supply an SSL certificate to +In some cases (especially if you are testing LinkAhead) you might need to supply an SSL certificate to allow SSL encryption. The `cacert` option sets the path to the ssl certificate for the connection: @@ -59,12 +59,12 @@ cacert=/path/to/caosdb.ca.pem ## Further Settings ## `debug=0` ensures that debug information is **not** printed to the terminal every time you interact -with CaosDB which makes the experience much less verbose. Set it to 1 or 2 in case you want to help +with LinkAhead which makes the experience much less verbose. Set it to 1 or 2 in case you want to help debugging (which I hope will not be necessary for this tutorial) or if you want to learn more about the internals of the protocol. `timeout` sets the timeout for requests to the server. A complete list of options can be found in the -[pycaosdb.ini file](https://gitlab.com/caosdb/caosdb-pylib/-/blob/main/examples/pycaosdb.ini) in +[pylinkahead.ini file](https://gitlab.com/linkahead/linkahead-pylib/-/blob/main/examples/pylinkahead.ini) in the examples folder of the source code. diff --git a/src/doc/future_caosdb.md b/src/doc/future_linkahead.md similarity index 87% rename from src/doc/future_caosdb.md rename to src/doc/future_linkahead.md index de6170fa42674ed4e3161fb791a397a149dba659..7c85de95b07c8101c04e8fcac82205e20fb9e8a6 100644 --- a/src/doc/future_caosdb.md +++ b/src/doc/future_linkahead.md @@ -1,4 +1,4 @@ -# The future of the CaosDB Python Client +# The future of the LinkAhead Python Client The current Python client has done us great services but its structure and the way it is used sometimes feels outdated and clumsy. In this document we sketch @@ -7,13 +7,13 @@ contribute to this development. At several locations in this document there will be links to discussion issues. 
If you want to discuss something new, you can create a new issue -[here](https://gitlab.com/caosdb/caosdb-pylib/-/issues/new). +[here](https://gitlab.com/linkahead/linkahead-pylib/-/issues/new). ## Overview Let's get a general impression before discussing single aspects. ``` python -import caosdb as db +import linkahead as db experiments = db.query("FIND Experiment") # print name and date for each `Experiment` for exp in experiments: @@ -29,13 +29,13 @@ new_one.name = "Needle Measurement" new_one.insert() ``` Related discussions: -- [recursive retrieve in query](https://gitlab.com/caosdb/caosdb-pylib/-/issues/57) -- [create_record function](https://gitlab.com/caosdb/caosdb-pylib/-/issues/58) -- [data model utility](https://gitlab.com/caosdb/caosdb-pylib/-/issues/59) +- [recursive retrieve in query](https://gitlab.com/linkahead/linkahead-pylib/-/issues/57) +- [create_record function](https://gitlab.com/linkahead/linkahead-pylib/-/issues/58) +- [data model utility](https://gitlab.com/linkahead/linkahead-pylib/-/issues/59) ## Quickstart Note that you can try out one possible implementation using the -`caosdb.high_level_api` module. It is experimental and might be removed in +`linkahead.high_level_api` module. It is experimental and might be removed in future! A `resolve_references` function allows to retrieve the referenced entities of @@ -47,7 +47,7 @@ function: - `depth`: Maximum recursion depth - `references`: Whether to use the supplied db.Container to resolve references. This allows offline usage. Set it to None if you want to - automatically retrieve entities from the current CaosDB connection. + automatically retrieve entities from the current LinkAhead connection. In order to allow a quick look at the object structures an easily readable serialization is provided by the `to_dict` function. It has the following @@ -76,7 +76,7 @@ but no Properties are inserted unexpectedly with NULL values. - Raise Exception if attribute does not exist but is accessed? 
-[Discussion](https://gitlab.com/caosdb/caosdb-pylib/-/issues/60) +[Discussion](https://gitlab.com/linkahead/linkahead-pylib/-/issues/60) We aim for a distinction between "concrete" Properties of Records/RecordTypes and "abstract" Properties as part of the definition of a data model. Concrete properties are always "contained" in a record or record type while abstract properties stand for themselves. @@ -129,7 +129,7 @@ GRPC json serialization? I can resolve later and end up with the same result: `recs =db.query("FIND Experiment", depth=2)` equals `recs = db.query("FIND Experiment"); recs = resolve_references(recs, depth=2)` -[Discussion](https://gitlab.com/caosdb/caosdb-pylib/-/issues/57) +[Discussion](https://gitlab.com/linkahead/linkahead-pylib/-/issues/57) #### Alternative @@ -155,11 +155,11 @@ Especially the following functions operate by default NOT in-place: - insert - retrieve - resolve_references -[Discussion](https://gitlab.com/caosdb/caosdb-pylib/-/issues/61) +[Discussion](https://gitlab.com/linkahead/linkahead-pylib/-/issues/61) ## Extended Example ``` python -import caosdb as db +import linkahead as db dm = db.get_data_model() @@ -174,7 +174,7 @@ print("The new record has the ID:", inserted.id) ``` ### Factory method -While creating an Entity will not talk to a CaosDB server and can thus be done offline, the factory method +While creating an Entity will not talk to a LinkAhead server and can thus be done offline, the factory method `create_record` allows to 1. Retrieve the parent and set attributes according to inheritance 2. Use a container to resolve the parent and set attributes diff --git a/src/doc/gallery/Makefile b/src/doc/gallery/Makefile index 658f9a6a93e23957b20aee5f38e5565bde35af80..b8ed911d9fdeb2cc64023ee0c6ec2306f84d5afa 100644 --- a/src/doc/gallery/Makefile +++ b/src/doc/gallery/Makefile @@ -1,4 +1,4 @@ -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2022 IndiScale GmbH <info@indiscale.com> # Copyright (C) 2022 Daniel Hornung <d.hornung@indiscale.com> diff --git a/src/doc/gallery/curator-permissions.rst b/src/doc/gallery/curator-permissions.rst index fa6b4022b7fbc1d042ed00f265e63a2675794a21..f5113a9812a37e8b8dd8e95599fba8061967640b 100644 --- a/src/doc/gallery/curator-permissions.rst +++ b/src/doc/gallery/curator-permissions.rst @@ -18,7 +18,7 @@ In the following, you'll learn how to Prerequisites ------------- -This example needs some preparations regarding your CaosDB setup that have to +This example needs some preparations regarding your LinkAhead setup that have to (or, for the sake of simplicity, should) be done outside the actual Python example script. @@ -26,27 +26,27 @@ The curator role ~~~~~~~~~~~~~~~~ First, a ``curator`` role is created with a meaningful description. We'll use -``caosdb_admin.py`` for this which leads to the following command: +``linkahead_admin.py`` for this which leads to the following command: .. code:: console - $ caosdb_admin.py create_role "curator" "A user who is permitted to create new Records, Properties, and RecordTypes but who is not allowed to change the core data model." + $ linkahead_admin.py create_role "curator" "A user who is permitted to create new Records, Properties, and RecordTypes but who is not allowed to change the core data model." To actually see how this role's permissions change, we also need a user with this role. Assume you already have created and activated (see :doc:`Administration <../administration>`) a ``test_curator`` user, then -``caosdb_admin.py`` is used again to assign it the correct role: +``linkahead_admin.py`` is used again to assign it the correct role: .. code:: console - $ caosdb_admin.py add_user_roles test_curator curator + $ linkahead_admin.py add_user_roles test_curator curator .. note:: The ``test_curator`` user shouldn't have administration privileges, otherwise the below changes won't have any effect. 
-The core data model and caosdb-advanced-user-tools +The core data model and linkahead-advanced-user-tools ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In principle, the following script works with any data model defined in a json @@ -61,8 +61,8 @@ Clone the schemata into the same directory containing the below script via $ git clone https://github.com/leibniz-zmt/zmt-metadata-schema.git -Furthermore, we'll need the `CaosDB Advanced User Tools -<https://gitlab.com/caosdb/caosdb-advanced-user-tools>`_ for loading the +Furthermore, we'll need the `LinkAhead Advanced User Tools +<https://gitlab.com/linkahead/linkahead-advanced-user-tools>`_ for loading the metadata schemata from the json files, so install them via .. code:: console @@ -109,13 +109,13 @@ Your complete ``global_entities_permissions.xml`` might then look like .. note:: - Note that you have to restart your CaosDB server after modifying the + Note that you have to restart your LinkAhead server after modifying the ``global_entities_permissions.xml``. The code -------- -After having applied all of the above prerequisites and restarting your CaosDB +After having applied all of the above prerequisites and restarting your LinkAhead server, execute the following code. :download:`Download full code<curator_permissions.py>` diff --git a/src/doc/gallery/curator_permissions.py b/src/doc/gallery/curator_permissions.py index 16b4b7f6f1bb9abfb7e191c6a1101181984bce9a..a9c9f319b3c153bebd09516d46a75e1f28eb40bf 100644 --- a/src/doc/gallery/curator_permissions.py +++ b/src/doc/gallery/curator_permissions.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # encoding: utf-8 # -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2022 Indiscale GmbH <info@indiscale.com> # Copyright (C) 2022 Florian Spreckelsen <f.spreckelsen@indiscale.com> @@ -23,9 +23,9 @@ import os import sys -import caosdb as db +import linkahead as db from caosadvancedtools.models.parser import parse_model_from_json_schema -from caosdb import administration as admin +from linkahead import administration as admin CURATOR = "curator" diff --git a/src/doc/gallery/index.rst b/src/doc/gallery/index.rst index bfba4317c3556d0692eb402f42ba3699be586d5a..f3c6c699d110ca2927e75a4a37055a020b55918a 100644 --- a/src/doc/gallery/index.rst +++ b/src/doc/gallery/index.rst @@ -1,13 +1,13 @@ -PyCaosDB Code Gallery -===================== +PyLinkAhead Code Gallery +======================== -This chapter collects code examples which can be immediately run against an empty CaosDB instance. +This chapter collects code examples which can be immediately run against an empty LinkAhead instance. .. note:: These examples require a configuration file with valid server and user/password settings. Refer - to the :ref:`Configuration <Configuration of PyCaosDB>` section for details. + to the :ref:`Configuration <Configuration of PyLinkAhead>` section for details. .. toctree:: :maxdepth: 2 diff --git a/src/doc/gallery/simulation.py b/src/doc/gallery/simulation.py index 342d5d980fc2b1a981f4a76d99e1954f8b2f5c2a..f92e7b7f0cdc305fbe8a19a4c3d8755d1348c16e 100644 --- a/src/doc/gallery/simulation.py +++ b/src/doc/gallery/simulation.py @@ -1,5 +1,5 @@ """ -Run a simulation and store the values into CaosDB. +Run a simulation and store the values into LinkAhead. 
>>> main() # doctest: +ELLIPSIS These distances resulted in small x,y, values: @@ -8,12 +8,12 @@ These distances resulted in small x,y, values: import numpy as np import scipy.integrate -import caosdb as db +import linkahead as db from caosadvancedtools.table_converter import to_table -def setup_caosdb(): - """Create the data model and insert it into CaosDB +def setup_linkahead(): + """Create the data model and insert it into LinkAhead The data model consists of the following RecordTypes: @@ -35,7 +35,7 @@ def setup_caosdb(): The data model of course also contains the corresponding properties for these RecordTypes. """ - cont = db.Container() # Container to insert all Entities at once into CaosDB + cont = db.Container() # Container to insert all Entities at once into LinkAhead # create Properties cont.append(db.Property("x", datatype=db.DOUBLE)) cont.append(db.Property("y", datatype=db.DOUBLE)) @@ -74,7 +74,7 @@ def simulations(n, t_max): # Get the parameters and result initial, result = run_simulation(run=i, t_max=t_max) - # Prepare CaosDB insertion + # Prepare LinkAhead insertion run = db.Record().add_parent("SoftwareRun").add_property("Software", value=software.id) parameters = (db.Record().add_parent("Parameters").add_property("x", initial[0]) .add_property("y", initial[1]).add_property("z", initial[2])) @@ -83,7 +83,7 @@ def simulations(n, t_max): run.add_property("Parameters", value=parameters).add_property("Result", value=result_record) cont = db.Container() cont.extend([run, parameters, result_record]) - cont.insert() # Insert everything of this run into CaosDB. + cont.insert() # Insert everything of this run into LinkAhead. def run_simulation(run, t_max): @@ -121,7 +121,7 @@ def analyze(): def main(): # 1. Set up the data model - setup_caosdb() + setup_linkahead() # 2. 
Run simulations simulations(n=200, t_max=5) diff --git a/src/doc/gallery/simulation.rst b/src/doc/gallery/simulation.rst index ce1a7f457a142e36ef9f2b0cfe6a4df0b9fcedf6..887c6219d80b579bda50fff5cf6411798579228c 100644 --- a/src/doc/gallery/simulation.rst +++ b/src/doc/gallery/simulation.rst @@ -6,7 +6,7 @@ This code example 1. sets up the data model 2. runs simulations -3. stores the simulation parameters and results into CaosDB +3. stores the simulation parameters and results into LinkAhead 4. retrieves the parameters for interesting results. :download:`Download code<simulation.py>` diff --git a/src/doc/high_level_api.rst b/src/doc/high_level_api.rst index e92f2dd5e326b14222ad3c776ce5f5ed1ed31536..5f8ae7f9b998fd1205674250383f06ae25aaf460 100644 --- a/src/doc/high_level_api.rst +++ b/src/doc/high_level_api.rst @@ -2,16 +2,15 @@ High Level API ============== In addition to the old standard pylib API, new versions of pylib ship -with a high level API that facilitates usage of CaosDB entities within +with a high level API that facilitates usage of LinkAhead entities within data analysis scripts. In a nutshell that API exposes all properties of -CaosDB Records as standard python attributes making their access easier. +LinkAhead Records as standard python attributes making their access easier. Or to speak it out directly in Python: .. 
code:: python - - import caosdb as db + import linkahead as db # Old API: r = db.Record() r.add_parent("Experiment") @@ -19,7 +18,7 @@ Or to speak it out directly in Python: r.get_property("alpha").value = 25 # setting properties (old api) print(r.get_property("alpha").value + 25) # getting properties (old api) - from caosdb.high_level_api import convert_to_python_entity + from linkahead.high_level_api import convert_to_python_entity obj = convert_to_python_object(r) # create a high level entity obj.r = 25 # setting properties (new api) print(obj.r + 25) # getting properties (new api) @@ -28,7 +27,7 @@ Quickstart ---------- The module, needed for the high level API is called: -``caosdb.high_level_api`` +``linkahead.high_level_api`` There are two functions converting entities between the two representation (old API and new API): @@ -50,15 +49,15 @@ practical shorthands: return it as high level entity. - ``create_entity_container``: Convert a high level entity into a standard entity including all sub entities. -- ``query``: Do a CaosDB query and return the result as a container of +- ``query``: Do a LinkAhead query and return the result as a container of high level objects. -So as a first example, you could retrieve any record from CaosDB and use +So as a first example, you could retrieve any record from LinkAhead and use it using its high level representation: .. code:: python - from caosdb.high_level_api import query + from linkahead.high_level_api import query res = query("FIND Experiment") experiment = res[0] @@ -78,7 +77,7 @@ series of commands to achieve the same result: .. 
code:: python - import caosdb as db + import linkahead as db res = db.execute_query("FIND Experiment") output = res.get_property("output") @@ -87,12 +86,12 @@ series of commands to achieve the same result: Resolving subproperties makes use of the "resolve\ :sub:`reference`" function provided by the high level entity class -(``CaosDBPythonEntity``), with the following parameters: +(``LinkAheadPythonEntity``), with the following parameters: - ``deep``: Whether to use recursive retrieval - ``references``: Whether to use the supplied db.Container to resolve references. This allows offline usage. Set it to None if you want to - automatically retrieve entities from the current CaosDB connection. + automatically retrieve entities from the current LinkAhead connection. - ``visited``: Needed for recursion, set this to None. Objects in the high level representation can be serialized to a simple @@ -123,37 +122,37 @@ As described in the section Quickstart the two functions beetween the high level and the standard representation. The high level entities are represented using the following classes from -the module ``caosdb.high_level_api``: - -- ``CaosDBPythonEntity``: Base class of the following entity classes. -- ``CaosDBPythonRecord`` -- ``CaosDBPythonRecordType`` -- ``CaosDBPythonProperty`` -- ``CaosDBPythonMultiProperty``: **WARNING** Not implemented yet. -- ``CaosDBPythonFile``: Used for file entities and provides an +the module ``linkahead.high_level_api``: + +- ``LinkAheadPythonEntity``: Base class of the following entity classes. +- ``LinkAheadPythonRecord`` +- ``LinkAheadPythonRecordType`` +- ``LinkAheadPythonProperty`` +- ``LinkAheadPythonMultiProperty``: **WARNING** Not implemented yet. +- ``LinkAheadPythonFile``: Used for file entities and provides an additional ``download`` function for being able to directly retrieve - files from CaosDB. + files from LinkAhead. 
In addition, there are the following helper structures which are realized as Python data classes: -- ``CaosDBPropertyMetaData``: For storing meta data about properties. -- ``CaosDBPythonUnresolved``: The base class of unresolved "things". -- ``CaosDBPythonUnresolvedParent``: Parents of entities are stored as +- ``LinkAheadPropertyMetaData``: For storing meta data about properties. +- ``LinkAheadPythonUnresolved``: The base class of unresolved "things". +- ``LinkAheadPythonUnresolvedParent``: Parents of entities are stored as unresolved parents by default, storing an id or a name of a parent (or both). -- ``CaosDBPythonUnresolvedReference``: An unresolved reference is a +- ``LinkAheadPythonUnresolvedReference``: An unresolved reference is a reference property with an id which has not (yet) been resolved to an Entity. The function "resolve\ :sub:`references`" can be used to recursively -replace ``CaosDBPythonUnresolvedReferences`` into members of type -``CaosDBPythonRecords`` or ``CaosDBPythonFile``. +replace ``LinkAheadPythonUnresolvedReferences`` into members of type +``LinkAheadPythonRecords`` or ``LinkAheadPythonFile``. -Each property stored in a CaosDB record corresponds to: +Each property stored in a LinkAhead record corresponds to: -- a member attribute of ``CaosDBPythonRecord`` **and** -- an entry in a dict called "metadata" storing a CaosDBPropertyMetadata +- a member attribute of ``LinkAheadPythonRecord`` **and** +- an entry in a dict called "metadata" storing a LinkAheadPropertyMetadata object with the following information about proeprties: - ``unit`` diff --git a/src/doc/index.rst b/src/doc/index.rst index 7344b6aacdd55fd75f4940d834104faa00c33069..5e5a5e8801b0bd4c91ce225766d7973ee8fa2b92 100644 --- a/src/doc/index.rst +++ b/src/doc/index.rst @@ -1,6 +1,6 @@ -Welcome to PyCaosDB's documentation! -==================================== +Welcome to PyLinkAhead's documentation! +======================================= .. 
toctree:: :maxdepth: 2 @@ -14,9 +14,9 @@ Welcome to PyCaosDB's documentation! Administration <administration> High Level API <high_level_api> Code gallery <gallery/index> - API documentation<_apidoc/caosdb> + API documentation<_apidoc/linkahead> -This is the documentation for the Python client library for CaosDB, ``PyCaosDB``. +This is the documentation for the Python client library for LinkAhead, ``PyLinkAhead``. This documentation helps you to :doc:`get started<README_SETUP>`, explains the most important :doc:`concepts<concepts>` and offers a range of :doc:`tutorials<tutorials/index>`. diff --git a/src/doc/tutorials/Data-Insertion.rst b/src/doc/tutorials/Data-Insertion.rst index 82df07691f7c78a2787d67463ca222d2e68249ca..6804ef5cd67f0febfcb0ab20f40e10aa16547b8b 100644 --- a/src/doc/tutorials/Data-Insertion.rst +++ b/src/doc/tutorials/Data-Insertion.rst @@ -4,12 +4,12 @@ Data Insertion Data Models ~~~~~~~~~~~ -Data is stored and structured in CaosDB using a concept of RecordTypes, Properties, Records etc. If -you do not know what these are, please look at the chapter :doc:`Data -Model<caosdb-server:Data-Model>` in the CaosDB server documentation. +Data is stored and structured in LinkAhead using a concept of RecordTypes, Properties, Records etc. +If you do not know what these are, please look at the chapter :doc:`Data +Model<linkahead-server:Data-Model>` in the LinkAhead server documentation. In order to insert some actual data, we need to create a data model -using RecordTypes and Properties (You may skip this if you use a CaosDB +using RecordTypes and Properties (You may skip this if you use a LinkAhead instance that already has the required types). When you create a new Property you must supply a datatype. So, let’s create a simple Property called “a†of datatype double. This is very easy in pylib: @@ -60,18 +60,18 @@ resolution, but we'll omit this for the sake of brevity for now. 
print(rt.get_property(name="epsilon").importance) ### rt has a "epsilon" property with the same importance as "BarkleySimulation" The parameter ``inheritance=(obligatory|recommended|fix|all|none)`` of -:py:meth:`Entity.add_parent()<caosdb.common.models.Entity.add_parent>` tells the server to assign +:py:meth:`Entity.add_parent()<linkahead.common.models.Entity.add_parent>` tells the server to assign all properties of the parent RecordType with the chosen importance (and properties with a higher importance) to the child RecordType automatically upon insertion. See the chapter on `importance <https://docs.indiscale.com/caosdb-server/specification/RecordType.html#importance>`_ in the -documentation of the CaosDB server for more information on the importance and inheritance of +documentation of the LinkAhead server for more information on the importance and inheritance of properties. .. note:: The inherited properties will only be visible after the insertion since they are set by the - CaosDB server, not by the Python client. + LinkAhead server, not by the Python client. Insert Actual Data @@ -111,7 +111,7 @@ record as its source data. Since we know that the id of the experiment record is The experiment record's id is used as the value of the ``Experiment`` property of the analysis Record (note how we use the RecordType ``Experiment`` as a -``REFERENCE`` property here). Sending a CaosDB query like ``FIND RECORD +``REFERENCE`` property here). Sending a LinkAhead query like ``FIND RECORD Experiment WHICH IS REFERENCED BY A Analysis WITH date=2020-01-08`` would now return our original experiment record. @@ -141,7 +141,7 @@ Finally, we can also insert both records at the same time using a cont = db.Container().extend([rec, ana]) # Add experiment and analysis # records to our container - cont.insert() # Insert both at the same time, the CaosDB server will + cont.insert() # Insert both at the same time, the LinkAhead server will # resolve the reference upon insertion. 
All three ways result in an Analysis record which references an Experiment @@ -209,7 +209,7 @@ list-valued attribute in Python, as the following example illustrates. .. code:: python - import caosdb as db + import linkahead as db db.Property(name="TestList", datatype=db.LIST(db.DOUBLE)).insert() db.RecordType(name="TestType").add_property(name="TestList").insert() db.Record(name="TestRec").add_parent("TestType").add_property( @@ -254,7 +254,7 @@ Updating an existing file by uploading a new version. .. code:: python - import caosdb as db + import linkahead as db file_upd = db.File(id=174).retrieve() diff --git a/src/doc/tutorials/Entity-Getters.rst b/src/doc/tutorials/Entity-Getters.rst index 50ed13201e5720de22bf0b605bc5162834a458a8..5bcc97fb7ef097cfa8463e9736ee891152e55559 100644 --- a/src/doc/tutorials/Entity-Getters.rst +++ b/src/doc/tutorials/Entity-Getters.rst @@ -2,7 +2,7 @@ Entity Getters ============== -There is a very frequent situation when working with PyCaosDB: You need to get a specific Entity +There is a very frequent situation when working with PyLinkAhead: You need to get a specific Entity from the remote server. For example, you need the Property Entity in order to make an update. Sure, you can do a ``db.Entity().retrieve()`` or submit a query, but there is an even faster way which also helps preventing errors: diff --git a/src/doc/tutorials/basic_analysis.rst b/src/doc/tutorials/basic_analysis.rst index c40cad28b8c9a3be537c641b9614da2eb4df8dd9..960d542feea44103dbdb493860207d4de434ee2b 100644 --- a/src/doc/tutorials/basic_analysis.rst +++ b/src/doc/tutorials/basic_analysis.rst @@ -4,14 +4,14 @@ Basic Analysis If you have not yet, configure a connection with the demo instance. E.g.: ->>> import caosdb as db +>>> import linkahead as db >>> _ = db.configure_connection( ... url="https://demo.indiscale.com/", ... password_method="plain", ... username="admin", ... 
password="caosdb") -A basic Analysis of data in CaosDB could start like: +A basic Analysis of data in LinkAhead could start like: >>> >>> analyses = db.execute_query("FIND RECORD Analysis with quality_factor") diff --git a/src/doc/tutorials/caching.rst b/src/doc/tutorials/caching.rst index aad9a1ddbd9e93a3cd06887eaffcf956c3c5bea6..e814d32ff1be12708c6f0ec93b9424aa5a813ff4 100644 --- a/src/doc/tutorials/caching.rst +++ b/src/doc/tutorials/caching.rst @@ -21,12 +21,12 @@ function, easily created from ``get_entity_by_name`` using Python's ``lru_cache` # reset the cache with cached_get_by_name.cache_clear() -For convenience, PyCaosDB provides the ``caosdb.cached`` module that defines the functions +For convenience, PyLinkAhead provides the ``linkahead.cached`` module that defines the functions ``cached_query`` and ``cached_get_entity_by``, they use a shared cache. Let's have a look: .. code:: python - from caosdb.cached import cached_query, cached_get_entity_by, cache_clear, cache_info, cache_initialize + from linkahead.cached import cached_query, cached_get_entity_by, cache_clear, cache_info, cache_initialize rt1 = cached_get_entity_by(name='RT1') qresult = cached_query('FIND Experiment WITH parameter=1') # you can inspect the cache @@ -45,7 +45,7 @@ have entities on hand from previous queries that you want to add. .. code:: python - from caosdb.cached import cache_fill, AccessType + from linkahead.cached import cache_fill, AccessType # Here, items must be a dict with Entity IDs as keys and the Entities as values. cache_fill(items, AccessType.EID, unique=True) # If you now use IDs that were in items, they are taken from the cache. 
diff --git a/src/doc/tutorials/complex_data_models.rst b/src/doc/tutorials/complex_data_models.rst index 7b45b6a2681bcf781fd4acc9329ffada28d4e01c..569acdae174a9df9d0d2b5eae9a0084d793cc90c 100644 --- a/src/doc/tutorials/complex_data_models.rst +++ b/src/doc/tutorials/complex_data_models.rst @@ -1,7 +1,7 @@ Complex Data Models ------------------- -With CaosDB it is possible to create very complex data models. +With LinkAhead it is possible to create very complex data models. E.g. it is possible to add properties to properties to cover complex relations in data management workflows. @@ -17,7 +17,7 @@ Examples .. code-block:: python3 - import caosdb as db + import linkahead as db # Create two record types with descriptions: rt1 = db.RecordType(name="TypeA", description="The first type") diff --git a/src/doc/tutorials/data-model-interface.md b/src/doc/tutorials/data-model-interface.md index f6967c57a0a3de6e7c6fd3d2b64d3f59620526de..fa21c0c413fa2b72f247f5823fc98ea3a765868e 100644 --- a/src/doc/tutorials/data-model-interface.md +++ b/src/doc/tutorials/data-model-interface.md @@ -8,16 +8,16 @@ pip3 install --user --no-deps . ``` in ```bash -CaosDB/data_models +LinkAhead/data_models ``` Change to the appropriate directory ```bash -cd CaosDB/data_models +cd LinkAhead/data_models ``` There are "data models" defined in ```bash -caosdb_models +linkahead_models ``` having an ending like "_model.py" A set of data models is also considered to be a model diff --git a/src/doc/tutorials/errors.rst b/src/doc/tutorials/errors.rst index 37c53c9b527a0435f9f24ae6c6e71687e73eb963..a66b7c7607f6f3bcac7a843ed8dc09893b4b99a2 100644 --- a/src/doc/tutorials/errors.rst +++ b/src/doc/tutorials/errors.rst @@ -3,16 +3,16 @@ Error Handling ============== In case of erroneous transactions, connection problems and a lot of -other cases, PyCaosDB may raise specific errors in order to pinpoint +other cases, PyLinkAhead may raise specific errors in order to pinpoint the problem as precisely as possible. 
Some of these errors a -representations of errors in the CaosDB server, others stem from +representations of errors in the LinkAhead server, others stem from problems that occurred on the client side. The errors and exceptions are ordered hierarchically form the most general exceptions to specific transaction or connection problems. The most important error types and the hierarchy will be explained in the following. For more information on specific error types, see also the -:doc:`source code<../_apidoc/caosdb.exceptions>`. +:doc:`source code<../_apidoc/linkahead.exceptions>`. .. note:: @@ -23,13 +23,13 @@ following. For more information on specific error types, see also the discussed. Please refer to the documentation of PyCaosDB 0.4.1 and earlier for the old error handling. -CaosDBException ----------------- +LinkAheadException +------------------ -``CaosDBException`` is the most generic exception and all other error classes inherit +``LinkAheadException`` is the most generic exception and all other error classes inherit from this one. Because of its generality, it doesn't tell you much -except that some component of PyCaosDB raised an exception. If you -want to catch all possible CaosDB errors, this is the class to use. +except that some component of PyLinkAhead raised an exception. If you +want to catch all possible LinkAhead errors, this is the class to use. TransactionError ---------------- @@ -116,7 +116,7 @@ HTTP Errors ----------- An ``HTTPClientError`` or an ``HTTPServerError`` is raised in case of -http(s) connection problems caused by the Python client or the CaosDB +http(s) connection problems caused by the Python client or the LinkAhead server, respectively. There are the following subclasses of ``HTTPClientError`` that are used to specify the connection problem: @@ -130,9 +130,9 @@ server, respectively. 
There are the following subclasses of Other Errors ------------ -There are further subclasses of ``CaosDBException`` that are raised in +There are further subclasses of ``LinkAheadException`` that are raised in case of faulty configurations or other problems. They should be rather -self-explanatory from their names; see the :doc:`source code<../_apidoc/caosdb.exceptions>` +self-explanatory from their names; see the :doc:`source code<../_apidoc/linkahead.exceptions>` for further information. * ``ConfigurationError`` @@ -148,7 +148,7 @@ Examples .. code-block:: python3 - import caosdb as db + import linkahead as db def link_and_insert(entity, linked, link=True): """Link the ENTITY to LINKED and insert it.""" diff --git a/src/doc/tutorials/first_steps.rst b/src/doc/tutorials/first_steps.rst index c84ec52aa63f0563b22c698081e89600c7af6122..d2a7019b14e469854dbc25efec60a1a9b0bdbacf 100644 --- a/src/doc/tutorials/first_steps.rst +++ b/src/doc/tutorials/first_steps.rst @@ -1,18 +1,18 @@ First Steps =========== -You should have a working connection to a CaosDB instance now. If not, please check out the +You should have a working connection to a LinkAhead instance now. If not, please check out the :doc:`Getting Started secton</README_SETUP>`. -If you are not yet familiar with Records, RecordTypes and Properties used in CaosDB, +If you are not yet familiar with Records, RecordTypes and Properties used in LinkAhead, please check out the respective part in the `Web Interface tutorial`_. -You should also know the basics of the CaosDB Query Language (a tutorial is +You should also know the basics of the LinkAhead Query Language (a tutorial is `here <https://docs.indiscale.com/caosdb-webui/tutorials/query.html>`_). We recommend that you connect to the `demo instance`_ (hosted by `Indiscale`_) in order to try out the following examples. You can do this with ->>> import caosdb as db +>>> import linkahead as db >>> _ = db.configure_connection( ... 
url="https://demo.indiscale.com/", ... password_method="plain", @@ -34,7 +34,7 @@ data is the strength of the web interface while the automated processing of data is the strength of the Python client. >>> type(response) -<class 'caosdb.common.models.Container'> +<class 'linkahead.common.models.Container'> As you can see the type of the returned object is Container. Containers are simply lists of LinkAhead objects with useful functions to interact with LinkAhead. @@ -47,7 +47,7 @@ Let's look at the first element: >>> firstguitar = response[0] >>> print(type(firstguitar)) -<class 'caosdb.common.models.Record'> +<class 'linkahead.common.models.Record'> >>> print(firstguitar) <Record ... @@ -93,7 +93,7 @@ Ids can also come in handy when searching. Suppose you have some complicated con >>> # thrown if the number of results is unequal to 1 and the resulting object will be >>> # an Entity and not a Container >>> print(type(record)) -<class 'caosdb.common.models.Record'> +<class 'linkahead.common.models.Record'> >>> print(record.id) 123 @@ -120,7 +120,7 @@ If the files are large data files, it is often a better idea to only retrieve th Summary ------- -Now you know how to use Python to send queries to CaosDB and you can access +Now you know how to use Python to send queries to LinkAhead and you can access the result Records and their properties. The next tutorial shows how to make some meaningful use of this. diff --git a/src/doc/tutorials/index.rst b/src/doc/tutorials/index.rst index ce37993d7ec5e0888da8a2b4c58904bcbdc43bb4..706e26c2b1b4876c29d43c2bddd9a5fe357a003d 100644 --- a/src/doc/tutorials/index.rst +++ b/src/doc/tutorials/index.rst @@ -1,6 +1,6 @@ -PyCaosDB Tutorials -================== +PyLinkAhead Tutorials +===================== This chapter contains tutorials that lead you from the first steps to advanced usage of the Python client. 
diff --git a/src/doc/tutorials/serverside.rst b/src/doc/tutorials/serverside.rst index 93f0fdcf742efc70bc80f5113eb7c6ddbbf87cde..de5f34b47e3d30b2789c116b915b1402b676ad44 100644 --- a/src/doc/tutorials/serverside.rst +++ b/src/doc/tutorials/serverside.rst @@ -3,7 +3,7 @@ Server Side Scripting ===================== The administrator may store regularly needed scripts, e.g. for computing a -standardized analysis, on the same machine as the CaosDB server, "on the server +standardized analysis, on the same machine as the LinkAhead server, "on the server side", where they can be run directly by the server. The execution of those scripts can be initiated using the Python client, or the @@ -17,7 +17,7 @@ execution is fairly simple: .. code:: python - from caosdb.utils.server_side_scripting import run_server_side_script + from linkahead.utils.server_side_scripting import run_server_side_script response = run_server_side_script('scriptname.py') print(response.stderr,response.stdout) @@ -29,7 +29,7 @@ script (``stderr`` and ``stdout``) is returned within an response object. If the script requires additional arguments, those can be provided after the script's name. -Note that by default the script runs with your CaosDB account. It has your +Note that by default the script runs with your LinkAhead account. It has your permissions and changes are logged as if they were done by you directly. @@ -37,8 +37,8 @@ Testing it ~~~~~~~~~~ You can try this out using for example the ``diagnostics.py`` script (it is part -of the `CaosDB server repository -<https://gitlab.indiscale.com/caosdb/src/caosdb-server/-/blob/main/scripting/bin/administration/diagnostics.py>`_ +of the `LinkAhead server repository +<https://gitlab.com/linkahead/linkahead-server/-/blob/main/scripting/bin/administration/diagnostics.py>`_ and is also available on https://demo.indiscale.com). The script returns information about the server in JSON format. 
You can do for example the following: @@ -46,7 +46,7 @@ following: .. code:: python import json - from caosdb.utils.server_side_scripting import run_server_side_script + from linkahead.utils.server_side_scripting import run_server_side_script response = run_server_side_script('administration/diagnostics.py') print("JSON content:") print(json.loads(response.stdout)) diff --git a/src/linkahead/__init__.py b/src/linkahead/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3a8c5ba39c88deaa5dc945135e3828945fd39d58 --- /dev/null +++ b/src/linkahead/__init__.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# + +"""LinkAhead Python bindings. + +Tries to read from the inifile specified in the environment variable `PYCAOSDBINI` or +alternatively in `~/.pylinkahead.ini` upon import. After that, the ini file `pylinkahead.ini` in +the current working directory will be read additionally, if it exists. 
+ +""" + +from os import environ, getcwd +# Import of the connection function (which is used to connect to the DB): +from os.path import expanduser, join +from warnings import warn + +# Import of convenience methods: +from . import apiutils +from .common import administration +from .common.datatype import (BOOLEAN, DATETIME, DOUBLE, FILE, INTEGER, LIST, + REFERENCE, TEXT) +# Import of the basic API classes: +from .common.models import (ACL, ALL, FIX, NONE, OBLIGATORY, RECOMMENDED, + SUGGESTED, Container, DropOffBox, Entity, File, + Info, Message, Permissions, Property, Query, + QueryTemplate, Record, RecordType, delete, + execute_query, get_global_acl, + get_known_permissions, raise_errors) +from .common.state import State, Transition +from .configuration import _read_config_files, configure, get_config +from .connection.connection import configure_connection, get_connection +from .exceptions import * +from .utils.get_entity import (get_entity_by_id, get_entity_by_name, + get_entity_by_path) + +try: + from .version import version as __version__ +except ModuleNotFoundError: + version = "uninstalled" + __version__ = version + +_read_config_files() diff --git a/src/linkahead/apiutils.py b/src/linkahead/apiutils.py new file mode 100644 index 0000000000000000000000000000000000000000..597342e38a961c628edd84dd8dff37471ef2570b --- /dev/null +++ b/src/linkahead/apiutils.py @@ -0,0 +1,588 @@ +# -*- coding: utf-8 -*- +# +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> +# Copyright (C) 2020-2022 IndiScale GmbH <info@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ** end header
+#
+"""API-Utils: Some simplified functions for generation of records etc.
+
+"""
+
+import logging
+import warnings
+from collections.abc import Iterable
+from typing import Any, Dict, List
+
+from .common.datatype import is_reference
+from .common.models import (SPECIAL_ATTRIBUTES, Container, Entity, File,
+                            Property, Record, RecordType, execute_query)
+from .exceptions import LinkAheadException
+from .utils.git_utils import (get_branch_in, get_commit_in, get_diff_in,
+                              get_origin_url_in)
+
+logger = logging.getLogger(__name__)
+
+
+class EntityMergeConflictError(LinkAheadException):
+    """An error that is raised in case of an unresolvable conflict when merging
+    two entities.
+    """
+
+
+def new_record(record_type, name=None, description=None,
+               tempid=None, insert=False, **kwargs):
+    """Function to simplify the creation of Records.
+
+    record_type: The name of the RecordType to use for this record.
+    (ids should also work.)
+    name: Name of the new Record.
+    kwargs: Key-value-pairs for the properties of this Record.
+
+    Returns: The newly created Record.
+
+    Of course this function requires an open database connection!
+    """
+
+    rt = RecordType(name=record_type)
+    rt.retrieve()
+
+    r = Record(name)
+    r.add_parent(rt)
+
+    if tempid is not None:
+        r.id = tempid
+
+    if description is not None:
+        r.description = description
+
+    # Add all additional properties, treat iterables as multiple
+    # additions.
+
+    for k, v in kwargs.items():
+        if hasattr(v, "encode") or not isinstance(v, Iterable):
+            v = [v]
+
+        for vv in v:
+            p = Property(k)
+            p.retrieve()
+            p.value = vv
+            r.add_property(p)
+
+    if insert:
+        r.insert()
+
+    return r
+
+
+def id_query(ids):
+    warnings.warn("Please use 'create_id_query', which only creates "
+                  "the string.", DeprecationWarning)
+
+    return execute_query(create_id_query(ids))
+
+
+def create_id_query(ids):
+    return "FIND ENTITY WITH " + " OR ".join(
+        ["ID={}".format(id) for id in ids])
+
+
+def get_type_of_entity_with(id_):
+    objs = retrieve_entities_with_ids([id_])
+
+    if len(objs) == 0:
+        raise RuntimeError("ID {} not found.".format(id_))
+
+    if len(objs) > 1:
+        raise RuntimeError(
+            "ID {} is not unique. This is probably a bug in the LinkAhead server." .format(id_))
+    obj = objs[0]
+
+    if isinstance(obj, Record):
+        return Record
+    elif isinstance(obj, RecordType):
+        return RecordType
+    elif isinstance(obj, Property):
+        return Property
+    elif isinstance(obj, File):
+        return File
+    elif isinstance(obj, Entity):
+        return Entity
+
+
+def retrieve_entity_with_id(eid):
+    return execute_query("FIND ENTITY WITH ID={}".format(eid), unique=True)
+
+
+def retrieve_entities_with_ids(entities):
+    collection = Container()
+    step = 20
+
+    for i in range(len(entities)//step+1):
+        collection.extend(
+            execute_query(
+                create_id_query(entities[i*step:(i+1)*step])))
+
+    return collection
+
+
+def getOriginUrlIn(folder):
+    warnings.warn("""
+    This function is deprecated and will be removed with the next release.
+    Please use the linkahead.utils.git_utils.get_origin_url_in instead.""",
+                  DeprecationWarning)
+    return get_origin_url_in(folder)
+
+
+def getDiffIn(folder, save_dir=None):
+    warnings.warn("""
+    This function is deprecated and will be removed with the next release.
+ Please use the linkahead.utils.git_utils.get_diff_in instead.""", + DeprecationWarning) + return get_diff_in(folder, save_dir) + + +def getBranchIn(folder): + warnings.warn(""" + This function is deprecated and will be removed with the next release. + Please use the linkahead.utils.git_utils.get_branch_in instead.""", + DeprecationWarning) + return get_branch_in(folder) + + +def getCommitIn(folder): + warnings.warn(""" + This function is deprecated and will be removed with the next release. + Please use the linkahead.utils.git_utils.get_commit_in instead.""", + DeprecationWarning) + return get_commit_in(folder) + + +def compare_entities(old_entity: Entity, new_entity: Entity, compare_referenced_records: bool = False): + """Compare two entites. + + Return a tuple of dictionaries, the first index belongs to additional information for old + entity, the second index belongs to additional information for new entity. + + Additional information means in detail: + - Additional parents (a list under key "parents") + - Information about properties: + - Each property lists either an additional property or a property with a changed: + - datatype + - importance or + - value (not implemented yet) + + In case of changed information the value listed under the respective key shows the + value that is stored in the respective entity. + + If `compare_referenced_records` is `True`, also referenced entities will be + compared using this function (which is then called with + `compare_referenced_records = False` to prevent infinite recursion in case + of circular references). + + Parameters + ---------- + old_entity, new_entity : Entity + Entities to be compared + compare_referenced_records : bool, optional + Whether to compare referenced records in case of both, `old_entity` and + `new_entity`, have the same reference properties and both have a Record + object as value. 
def compare_entities(old_entity: Entity, new_entity: Entity,
                     compare_referenced_records: bool = False):
    """Compare two entities and return their difference as two dicts.

    The first returned dict contains the parts that exist only in (or differ
    in) ``old_entity``, the second one the corresponding parts of
    ``new_entity``.  Each dict has a ``"parents"`` key (list of parent names)
    and a ``"properties"`` key (dict mapping property names to the differing
    attributes), plus one entry per differing special attribute (name, role,
    datatype, ...).

    Parameters
    ----------
    old_entity, new_entity : Entity
        Entities to be compared.
    compare_referenced_records : bool, optional
        Whether to compare referenced records in case both entities have
        reference properties with Record objects as values.  If set to
        `False`, only the corresponding Python objects are compared which may
        lead to unexpected behavior when identical records are stored in
        different objects.  Default is False.

    """
    olddiff: Dict[str, Any] = {"properties": {}, "parents": []}
    newdiff: Dict[str, Any] = {"properties": {}, "parents": []}

    # The very same object is trivially identical.
    if old_entity is new_entity:
        return (olddiff, newdiff)

    for attr in SPECIAL_ATTRIBUTES:
        try:
            oldattr = old_entity.__getattribute__(attr)
            old_entity_attr_exists = True
        except BaseException:
            old_entity_attr_exists = False
        try:
            newattr = new_entity.__getattribute__(attr)
            new_entity_attr_exists = True
        except BaseException:
            new_entity_attr_exists = False

        # Empty string and None both count as "not set".
        if old_entity_attr_exists and (oldattr == "" or oldattr is None):
            old_entity_attr_exists = False

        if new_entity_attr_exists and (newattr == "" or newattr is None):
            new_entity_attr_exists = False

        if not old_entity_attr_exists and not new_entity_attr_exists:
            continue

        # Differ if set on exactly one side, or set on both with unequal values.
        if ((old_entity_attr_exists ^ new_entity_attr_exists)
                or (oldattr != newattr)):

            if old_entity_attr_exists:
                olddiff[attr] = oldattr

            if new_entity_attr_exists:
                newdiff[attr] = newattr

    # properties

    for prop in old_entity.properties:
        matching = [p for p in new_entity.properties if p.name == prop.name]

        if len(matching) == 0:
            # Property exists only in the old entity.
            olddiff["properties"][prop.name] = {}
        elif len(matching) == 1:
            newdiff["properties"][prop.name] = {}
            olddiff["properties"][prop.name] = {}

            if (old_entity.get_importance(prop.name) !=
                    new_entity.get_importance(prop.name)):
                olddiff["properties"][prop.name]["importance"] = \
                    old_entity.get_importance(prop.name)
                newdiff["properties"][prop.name]["importance"] = \
                    new_entity.get_importance(prop.name)

            if (prop.datatype != matching[0].datatype):
                olddiff["properties"][prop.name]["datatype"] = prop.datatype
                newdiff["properties"][prop.name]["datatype"] = \
                    matching[0].datatype

            if (prop.unit != matching[0].unit):
                olddiff["properties"][prop.name]["unit"] = prop.unit
                newdiff["properties"][prop.name]["unit"] = \
                    matching[0].unit

            if (prop.value != matching[0].value):
                # basic comparison of value objects says they are different
                same_value = False
                if compare_referenced_records:
                    # scalar reference
                    if isinstance(prop.value, Entity) and isinstance(matching[0].value, Entity):
                        # explicitly not recursive to prevent infinite recursion
                        same_value = empty_diff(
                            prop.value, matching[0].value, compare_referenced_records=False)
                    # list of references
                    elif isinstance(prop.value, list) and isinstance(matching[0].value, list):
                        # all elements in both lists actually are entity objects
                        # TODO: check, whether mixed cases can be allowed or should lead to an error
                        if (all([isinstance(x, Entity) for x in prop.value])
                                and all([isinstance(x, Entity) for x in matching[0].value])):
                            # can't be the same if the lengths are different
                            if len(prop.value) == len(matching[0].value):
                                # do a one-by-one comparison; the values are
                                # the same, if all diffs are empty
                                same_value = all(
                                    [empty_diff(x, y, False) for x, y
                                     in zip(prop.value, matching[0].value)])

                if not same_value:
                    olddiff["properties"][prop.name]["value"] = prop.value
                    newdiff["properties"][prop.name]["value"] = \
                        matching[0].value

            # Drop the entry again if nothing actually differed.
            if (len(newdiff["properties"][prop.name]) == 0
                    and len(olddiff["properties"][prop.name]) == 0):
                newdiff["properties"].pop(prop.name)
                olddiff["properties"].pop(prop.name)

        else:
            raise NotImplementedError(
                "Comparison not implemented for multi-properties.")

    for prop in new_entity.properties:
        if len([0 for p in old_entity.properties if p.name == prop.name]) == 0:
            # Property exists only in the new entity.
            newdiff["properties"][prop.name] = {}

    # parents

    for parent in old_entity.parents:
        if len([0 for p in new_entity.parents if p.name == parent.name]) == 0:
            olddiff["parents"].append(parent.name)

    for parent in new_entity.parents:
        if len([0 for p in old_entity.parents if p.name == parent.name]) == 0:
            newdiff["parents"].append(parent.name)

    return (olddiff, newdiff)


def empty_diff(old_entity: Entity, new_entity: Entity, compare_referenced_records: bool = False):
    """Check whether the `compare_entities` found any differences between
    old_entity and new_entity.

    Parameters
    ----------
    old_entity, new_entity : Entity
        Entities to be compared
    compare_referenced_records : bool, optional
        Whether to compare referenced records in case of both, `old_entity` and
        `new_entity`, have the same reference properties and both have a Record
        object as value.

    """
    olddiff, newdiff = compare_entities(
        old_entity, new_entity, compare_referenced_records)
    for diff in [olddiff, newdiff]:
        for key in ["parents", "properties"]:
            if len(diff[key]) > 0:
                # There is a difference somewhere in the diff
                return False
        for key in SPECIAL_ATTRIBUTES:
            if key in diff and diff[key]:
                # There is a difference in at least one special attribute
                return False
    # all elements of the two diffs were empty
    return True


def merge_entities(entity_a: Entity, entity_b: Entity, merge_references_with_empty_diffs=True,
                   force=False):
    """Merge entity_b into entity_a such that they have the same parents and properties.

    datatype, unit, value, name and description will only be changed in entity_a
    if they are None for entity_a and set for entity_b. If there is a
    corresponding value for entity_a different from None, an
    EntityMergeConflictError will be raised to inform about an unresolvable merge
    conflict.

    The merge operation is done in place.

    Returns entity_a.

    WARNING: This function is currently experimental and insufficiently tested. Use with care.

    Parameters
    ----------
    entity_a, entity_b : Entity
        The entities to be merged. entity_b will be merged into entity_a in place
    merge_references_with_empty_diffs : bool, optional
        Whether the merge is performed if entity_a and entity_b both reference
        record(s) that may be different Python objects but have empty diffs. If
        set to `False` a merge conflict will be raised in this case
        instead. Default is True.
    force : bool, optional
        If True, in case `entity_a` and `entity_b` have the same properties, the
        values of `entity_a` are replaced by those of `entity_b` in the merge.
        If `False`, an EntityMergeConflictError is raised instead. Default is False.

    Returns
    -------
    entity_a : Entity
        The initial entity_a after the in-place merge

    Raises
    ------
    EntityMergeConflictError
        In case of an unresolvable merge conflict.

    """

    logger.warning(
        "This function is currently experimental and insufficiently tested. Use with care.")

    # Compare both entities; diff_r1 describes entity_a, diff_r2 describes entity_b.
    diff_r1, diff_r2 = compare_entities(
        entity_a, entity_b, compare_referenced_records=merge_references_with_empty_diffs)

    # Go through the comparison and try to apply changes to entity_a:
    for key in diff_r2["parents"]:
        entity_a.add_parent(entity_b.get_parent(key))

    for key in diff_r2["properties"]:
        if key in diff_r1["properties"]:
            if ("importance" in diff_r1["properties"][key] and
                    "importance" in diff_r2["properties"][key]):
                if (diff_r1["properties"][key]["importance"] !=
                        diff_r2["properties"][key]["importance"]):
                    raise NotImplementedError()
            elif ("importance" in diff_r1["properties"][key] or
                  "importance" in diff_r2["properties"][key]):
                raise NotImplementedError()

            for attribute in ("datatype", "unit", "value"):
                # compare_entities fills the attribute for both sides together,
                # so presence in diff_r2 implies presence in diff_r1.
                if (attribute in diff_r2["properties"][key] and
                        diff_r2["properties"][key][attribute] is not None):
                    if (diff_r1["properties"][key][attribute] is None):
                        setattr(entity_a.get_property(key), attribute,
                                diff_r2["properties"][key][attribute])
                    elif force:
                        setattr(entity_a.get_property(key), attribute,
                                diff_r2["properties"][key][attribute])
                    else:
                        # BUGFIX: the values for entity a and entity b were
                        # swapped here (diff_r1 belongs to entity_a).
                        raise EntityMergeConflictError(
                            f"Entity a ({entity_a.id}, {entity_a.name}) "
                            f"has a Property '{key}' with {attribute}="
                            f"{diff_r1['properties'][key][attribute]}\n"
                            f"Entity b ({entity_b.id}, {entity_b.name}) "
                            f"has a Property '{key}' with {attribute}="
                            f"{diff_r2['properties'][key][attribute]}")
        else:
            # TODO: This is a temporary FIX for
            # https://gitlab.indiscale.com/caosdb/src/caosdb-pylib/-/issues/105
            entity_a.add_property(id=entity_b.get_property(key).id,
                                  name=entity_b.get_property(key).name,
                                  datatype=entity_b.get_property(key).datatype,
                                  value=entity_b.get_property(key).value,
                                  unit=entity_b.get_property(key).unit,
                                  importance=entity_b.get_importance(key))
            # entity_a.add_property(
            #     entity_b.get_property(key),
            #     importance=entity_b.get_importance(key))

    for special_attribute in ("name", "description"):
        sa_a = getattr(entity_a, special_attribute)
        sa_b = getattr(entity_b, special_attribute)
        if sa_a != sa_b:
            if sa_a is None:
                setattr(entity_a, special_attribute, sa_b)
            elif force:
                # force overwrite
                setattr(entity_a, special_attribute, sa_b)
            else:
                raise EntityMergeConflictError(
                    f"Conflict in special attribute {special_attribute}:\n"
                    f"A: {sa_a}\nB: {sa_b}")
    return entity_a


def describe_diff(olddiff, newdiff, name=None, as_update=True):
    """Return a human-readable description of two diffs.

    Parameters
    ----------
    olddiff, newdiff : dict
        The diffs as returned by :func:`compare_entities`.
    name : str, optional
        Name of the compared entity, used in the heading of the description.
    as_update : bool, optional
        Unused; kept for backward compatibility.

    Returns
    -------
    str
        The description, or the empty string if the diffs are empty.
    """
    description = ""

    for attr in list(set(list(olddiff.keys()) + list(newdiff.keys()))):
        if attr == "parents" or attr == "properties":
            continue
        description += "{} differs:\n".format(attr)
        description += "old version: {}\n".format(
            olddiff[attr] if attr in olddiff else "not set")
        description += "new version: {}\n\n".format(
            newdiff[attr] if attr in newdiff else "not set")

    if len(olddiff["parents"]) > 0:
        description += ("Parents that are only in the old version:\n"
                        + ", ".join(olddiff["parents"]))

    if len(newdiff["parents"]) > 0:
        # BUGFIX: this listed olddiff["parents"] instead of the new parents.
        description += ("Parents that are only in the new version:\n"
                        + ", ".join(newdiff["parents"]))

    for prop in list(set(list(olddiff["properties"].keys())
                         + list(newdiff["properties"].keys()))):
        description += "property {} differs:\n".format(prop)

        if prop not in olddiff["properties"]:
            description += "it does not exist in the old version: \n"
        elif prop not in newdiff["properties"]:
            description += "it does not exist in the new version: \n"
        else:
            description += "old version: {}\n".format(
                olddiff["properties"][prop])
            description += "new version: {}\n\n".format(
                newdiff["properties"][prop])

    if description != "":
        description = ("## Difference between the old and the new "
                       "version of {}\n\n".format(name))+description

    return description


def apply_to_ids(entities, func):
    """ Apply a function to all ids.

    All ids means the ids of the entities themselves but also to all parents,
    properties and referenced entities.

    Parameters
    ----------
    entities : list of Entity
    func : function with one parameter.
    """

    for entity in entities:
        _apply_to_ids_of_entity(entity, func)


def _apply_to_ids_of_entity(entity, func):
    """Apply ``func`` to the id of ``entity``, its parents, its properties and
    any ids in reference property values."""
    entity.id = func(entity.id)

    for par in entity.parents:
        par.id = func(par.id)

    for prop in entity.properties:
        prop.id = func(prop.id)
        isref = is_reference(prop.datatype)

        if isref:
            if isinstance(prop.value, list):
                prop.value = [func(el) for el in prop.value]
            else:
                if prop.value is not None:
                    prop.value = func(prop.value)


def resolve_reference(prop: Property):
    """resolves the value of a reference property

    The integer value is replaced with the entity object.
    If the property is not a reference, then the function returns without
    change.
    """

    if not prop.is_reference(server_retrieval=True):
        return

    if isinstance(prop.value, list):
        referenced = []

        for val in prop.value:
            if isinstance(val, int):
                referenced.append(retrieve_entity_with_id(val))
            else:
                referenced.append(val)
        prop.value = referenced
    else:
        if isinstance(prop.value, int):
            prop.value = retrieve_entity_with_id(prop.value)


def create_flat_list(ent_list: List[Entity], flat: List[Entity]):
    """
    Recursively adds all properties contained in entities from ent_list to
    the output list flat. Each element will only be added once to the list.

    TODO: Currently this function is also contained in newcrawler module crawl.
    We are planning to permanently move it to here.
    """
    for ent in ent_list:
        for p in ent.properties:
            # For lists append each element that is of type Entity to flat:
            if isinstance(p.value, list):
                for el in p.value:
                    if isinstance(el, Entity):
                        if el not in flat:
                            flat.append(el)
                        # TODO: move inside if block?
                        create_flat_list([el], flat)
            elif isinstance(p.value, Entity):
                if p.value not in flat:
                    flat.append(p.value)
                # TODO: move inside if block?
                create_flat_list([p.value], flat)


# -*- coding: utf-8 -*-
#
# This file is a part of the LinkAhead Project.
#
# Copyright (C) 2023 IndiScale GmbH <info@indiscale.com>
# Copyright (C) 2023 Henrik tom Wörden <h.tomwoerden@indiscale.com>
# Copyright (C) 2023 Daniel Hornung <d.hornung@indiscale.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#

"""
This module provides some cached versions of functions that retrieve Entities from a remote server.

See also
========

- ``cache_initialize(...)`` : Re-initialize the cache.
- ``cache_clear()`` : Clear the cache.
- ``cached_query(query)`` : A cached version of ``execute_query(query)``.
- ``cached_get_entity_by(...)`` : Get an Entity by name, id, ...
"""

from enum import Enum
from functools import lru_cache
from typing import Union

from .utils import get_entity
from .common.models import execute_query, Entity, Container


# Default cache size; roughly 1GB for typical entity sizes.
DEFAULT_SIZE = 33333

# Dict-based staging area, used exclusively by cache_fill() to pre-load the
# real LRU cache with externally supplied results (e.g. older query results).
_DUMMY_CACHE = {}


class AccessType(Enum):
    """The ways an entity can be addressed in a cached lookup.  Needed for
filling the cache manually with :func:`cache_fill` .

    """
    QUERY = 1
    PATH = 2
    EID = 3
    NAME = 4


def cached_get_entity_by(eid: Union[str, int] = None, name: str = None, path: str = None, query:
                         str = None) -> Entity:
    """Return a single entity that is identified uniquely by one argument.

You must supply exactly one argument.

If a query phrase is given, the result must be unique.  If this is not what you
need, use :func:`cached_query` instead.

    """
    # Pair each possible identifier with its access type and keep the ones
    # that were actually supplied.
    candidates = [(AccessType.EID, eid),
                  (AccessType.NAME, name),
                  (AccessType.PATH, path),
                  (AccessType.QUERY, query)]
    supplied = [(kind, val) for kind, val in candidates if val is not None]

    if len(supplied) != 1:
        raise ValueError("You must supply exactly one argument.")

    access_kind, identifier = supplied[0]
    return _cached_access(access_kind, identifier, unique=True)


def cached_query(query_string) -> Container:
    """A cached version of :func:`linkahead.execute_query<linkahead.common.models.execute_query>`.

All additional arguments are at their default values.

    """
    return _cached_access(AccessType.QUERY, query_string, unique=False)


@lru_cache(maxsize=DEFAULT_SIZE)
def _cached_access(kind: AccessType, value: Union[str, int], unique=True):
    # The actually cached workhorse.  The (kind, value, unique) triple is the
    # cache key, so cached_query and cached_get_entity_by effectively get
    # separate sections of one shared cache with a single size limit.

    # Entries staged by cache_fill() short-circuit the server round trip; the
    # staging dict is emptied again afterwards.
    if value in _DUMMY_CACHE:
        return _DUMMY_CACHE[value]

    if kind == AccessType.QUERY:
        return execute_query(value, unique=unique)
    if kind == AccessType.NAME:
        return get_entity.get_entity_by_name(value)
    if kind == AccessType.EID:
        return get_entity.get_entity_by_id(value)
    if kind == AccessType.PATH:
        return get_entity.get_entity_by_path(value)

    raise ValueError(f"Unknown AccessType: {kind}")


def cache_clear() -> None:
    """Empty the cache that is used by `cached_query` and `cached_get_entity_by`."""
    _cached_access.cache_clear()


def cache_info():
    """Return info about the cache that is used by `cached_query` and `cached_get_entity_by`.

Returns
-------

out: named tuple
    See the standard library :func:`functools.lru_cache` for details."""
    return _cached_access.cache_info()


def cache_initialize(maxsize=DEFAULT_SIZE) -> None:
    """Create a new cache with the given size for `cached_query` and `cached_get_entity_by`.

    This implies a call of :func:`cache_clear`, the old cache is emptied.

    """
    cache_clear()
    # Re-wrap the undecorated function with a fresh LRU cache of the new size.
    global _cached_access
    _cached_access = lru_cache(maxsize=maxsize)(_cached_access.__wrapped__)


def cache_fill(items: dict, kind: AccessType = AccessType.EID, unique: bool = True) -> None:
    """Add entries to the cache manually.

    This allows to fill the cache without actually submitting queries.  Note
    that this does not overwrite existing entries with the same keys.

Parameters
----------

items: dict
    A dictionary with the entries to go into the cache.  The keys must be
    compatible with the AccessType given in ``kind``

kind: AccessType, optional
    The AccessType, for example ID, name, path or query.

unique: bool, optional
    If True, fills the cache for :func:`cached_get_entity_by`, presumably with
    :class:`linkahead.Entity<linkahead.common.models.Entity>` objects.  If False, the cache should be
    filled with :class:`linkahead.Container<linkahead.common.models.Container>` objects, for use with
    :func:`cached_query`.

    """
    # 1. stage the given items in the dummy dict cache
    _DUMMY_CACHE.update(items)

    # 2. pull each key through the cached function (only a dict lookup now)
    for key in items:
        _cached_access(kind, key, unique=unique)

    # 3. empty the dummy dict cache again
    _DUMMY_CACHE.clear()


# -*- coding: utf-8 -*-
#
# ** header v3.0
# This file is a part of the LinkAhead Project.
#
# Copyright (C) 2018 Research Group Biomedical Physics,
# Max-Planck-Institute for Dynamics and Self-Organization Göttingen
# Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com>
# Copyright (C) 2020 IndiScale GmbH <info@indiscale.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ** end header
#

"""Administration of a LinkAhead server: server properties, users, roles and
permissions.

The public helpers manage server properties and password generation; the
underscore-prefixed functions wrap the server's HTTP endpoints (``User``,
``Role``, ``UserRoles``, ``PermissionRules``) and re-raise HTTP errors with
human-readable messages.
"""

import random
import re
import string

from lxml import etree

from ..connection.connection import get_connection
from ..exceptions import (EntityDoesNotExistError, HTTPClientError,
                          HTTPForbiddenError, HTTPResourceNotFoundError,
                          ServerConfigurationException)
from .utils import xml2str


def set_server_property(key, value):
    """set_server_property.

    Set a server property.

    Parameters
    ----------
    key : str
        The name of the server property.
    value : str
        The value of the server property.


    Returns
    -------
    None
    """
    con = get_connection()
    try:
        con._form_data_request(method="POST", path="_server_properties",
                               params={key: value}).read()
    except EntityDoesNotExistError:
        # The _server_properties endpoint only exists when the server runs in
        # debug mode.
        raise ServerConfigurationException(
            "Debug mode in server is probably disabled.") from None


def get_server_properties():
    """get_server_properties.

    Get all server properties as a dict.

    Returns
    -------
    dict
        The server properties.
    """
    con = get_connection()
    try:
        body = con._http_request(
            method="GET", path="_server_properties")
    except EntityDoesNotExistError:
        # The _server_properties endpoint only exists when the server runs in
        # debug mode.
        raise ServerConfigurationException(
            "Debug mode in server is probably disabled.") from None

    # The response body is XML; each child element is one property.
    xml = etree.parse(body)
    props = dict()

    for elem in xml.getroot():
        props[elem.tag] = elem.text

    return props


def get_server_property(key):
    """get_server_property.

    Get a server property.

    Parameters
    ----------
    key : str
        The name of the server property

    Returns
    -------
    value : str
        The string value of the server property.

    Raises
    ------
    KeyError
        If the server property is no defined.
    """

    return get_server_properties()[key]


def generate_password(length: int):
    """Create a random password that fulfills the security requirements

    Parameters
    ----------
    length : int
        Length of the generated password.  Has to be greater than 7.

    Returns
    -------
    password : string
        Generated random password of the given length

    Raises
    ------
    ValueError:
        If the length is less than 8.
    """
    minimum_password_length = 8
    if length < minimum_password_length:
        raise ValueError("LinkAhead passwords have to be at least {} characters.".format(
            minimum_password_length))
    sample_letters = string.ascii_letters + string.digits + "!#$%*+-/:;?_"
    password = ''.join((random.choice(sample_letters) for i in range(length)))

    # Re-draw until the password contains at least one upper-case letter, one
    # lower-case letter, one digit and one special character.
    while not re.match(r"(?=.*[A-Z])(?=.*[a-z])(?=.*\d)(?=.*[\W_]).{8,}",
                       password):
        password = ''.join((random.choice(sample_letters)
                            for i in range(length)))

    return password


def _retrieve_user(name, realm=None, **kwargs):
    """Retrieve a user via ``GET User/[realm/]name``; HTTP errors are
    re-raised with clearer messages."""
    con = get_connection()
    try:
        return con._http_request(method="GET", path="User/" + (realm + "/" + name if realm is not None else name), **kwargs).read()
    except HTTPForbiddenError as e:
        e.msg = "You are not permitted to retrieve this user."
        raise
    except HTTPResourceNotFoundError as e:
        e.msg = "User does not exist."
        raise


def _delete_user(name, **kwargs):
    """Delete a user via ``DELETE User/name``."""
    con = get_connection()
    try:
        return con._http_request(method="DELETE", path="User/" + name, **kwargs).read()
    except HTTPForbiddenError as e:
        e.msg = "You are not permitted to delete this user."
        raise
    except HTTPResourceNotFoundError as e:
        e.msg = "User does not exist."
        raise


def _update_user(name, realm=None, password=None, status=None,
                 email=None, entity=None, **kwargs):
    """Update an existing user via ``PUT User/[realm/]name``; only the
    non-None keyword arguments are sent as form parameters."""
    con = get_connection()
    params = {}

    if password is not None:
        params["password"] = password

    if status is not None:
        params["status"] = status

    if email is not None:
        params["email"] = email

    if entity is not None:
        params["entity"] = str(entity)
    try:
        return con.put_form_data(entity_uri_segment="User/" + (realm + "/" + name if realm is not None else name), params=params, **kwargs).read()
    except HTTPResourceNotFoundError as e:
        e.msg = "User does not exist."
        raise e
    except HTTPForbiddenError as e:
        e.msg = "You are not permitted to update this user."
        raise e
    except HTTPClientError as e:
        # Use the server-provided error description, if any.
        for elem in etree.fromstring(e.body):
            if elem.tag == "Error":
                e.msg = elem.get("description")
        raise


def _insert_user(name, password=None, status=None, email=None, entity=None, **kwargs):
    """Create a new user via ``POST User``; only the non-None keyword
    arguments are sent as form parameters."""
    con = get_connection()
    params = {"username": name}

    if password is not None:
        params["password"] = password

    if status is not None:
        params["status"] = status

    if email is not None:
        params["email"] = email

    if entity is not None:
        params["entity"] = entity
    try:
        return con.post_form_data(entity_uri_segment="User", params=params, **kwargs).read()
    except HTTPForbiddenError as e:
        e.msg = "You are not permitted to insert a new user."
        raise e
    except HTTPClientError as e:
        # Use the server-provided error description, if any.
        for elem in etree.fromstring(e.body):
            if elem.tag == "Error":
                e.msg = elem.get("description")
        raise e


def _insert_role(name, description, **kwargs):
    """Create a new role via ``POST Role``."""
    con = get_connection()
    try:
        return con.post_form_data(entity_uri_segment="Role", params={"role_name": name, "role_description": description}, **kwargs).read()
    except HTTPForbiddenError as e:
        e.msg = "You are not permitted to insert a new role."
        raise
    except HTTPClientError as e:
        # 409 Conflict: a role with this name already exists.
        if e.status == 409:
            e.msg = "Role name is already in use. Choose a different name."
        raise


def _update_role(name, description, **kwargs):
    """Update a role's description via ``PUT Role/name``."""
    con = get_connection()
    try:
        return con.put_form_data(entity_uri_segment="Role/" + name, params={"role_description": description}, **kwargs).read()
    except HTTPForbiddenError as e:
        e.msg = "You are not permitted to update this role."
        raise
    except HTTPResourceNotFoundError as e:
        e.msg = "Role does not exist."
        raise


def _retrieve_role(name, **kwargs):
    """Retrieve a role via ``GET Role/name``."""
    con = get_connection()
    try:
        return con._http_request(method="GET", path="Role/" + name, **kwargs).read()
    except HTTPForbiddenError as e:
        e.msg = "You are not permitted to retrieve this role."
        raise
    except HTTPResourceNotFoundError as e:
        e.msg = "Role does not exist."
        raise


def _delete_role(name, **kwargs):
    """Delete a role via ``DELETE Role/name``."""
    con = get_connection()
    try:
        return con._http_request(method="DELETE", path="Role/" + name, **kwargs).read()
    except HTTPForbiddenError as e:
        e.msg = "You are not permitted to delete this role."
        raise
    except HTTPResourceNotFoundError as e:
        e.msg = "Role does not exist."
        raise


def _set_roles(username, roles, realm=None, **kwargs):
    """Replace a user's roles via ``PUT UserRoles/[realm/]username``.

    Returns the set of role names reported back by the server."""
    xml = etree.Element("Roles")

    for r in roles:
        xml.append(etree.Element("Role", name=r))

    body = xml2str(xml)
    con = get_connection()
    try:
        body = con._http_request(method="PUT", path="UserRoles/" + (realm + "/" +
                                 username if realm is not None else username), body=body, **kwargs).read()
    except HTTPForbiddenError as e:
        e.msg = "You are not permitted to set this user's roles."
        raise
    except HTTPResourceNotFoundError as e:
        e.msg = "User does not exist."
        raise
    except HTTPClientError as e:
        # 409 Conflict: at least one of the given roles is unknown.
        if e.status == 409:
            e.msg = "Role does not exist."
        raise
    ret = set()

    for r in etree.fromstring(body)[0]:
        if r.tag == "Role":
            ret.add(r.get("name"))

    return ret


def _get_roles(username, realm=None, **kwargs):
    """Return the set of role names of a user via ``GET UserRoles/[realm/]username``."""
    con = get_connection()
    try:
        body = con._http_request(method="GET", path="UserRoles/" + (
            realm + "/" + username if realm is not None else username), **kwargs).read()
    except HTTPForbiddenError as e:
        e.msg = "You are not permitted to retrieve this user's roles."
        raise
    except HTTPResourceNotFoundError as e:
        e.msg = "User does not exist."
        raise
    ret = set()

    for r in etree.fromstring(body).xpath('/Response/Roles')[0]:
        if r.tag == "Role":
            ret.add(r.get("name"))

    return ret


def _set_permissions(role, permission_rules, **kwargs):
    """Set permissions for a role.

Parameters
----------

role : str
    The role for which the permissions are set.

permission_rules : iterable<PermissionRule>
    An iterable with PermissionRule objects.

**kwargs :
    Additional arguments which are passed to the HTTP request.

Returns
-------
    None
    """
    xml = etree.Element("PermissionRules")

    for p in permission_rules:
        xml.append(p._to_xml())

    body = xml2str(xml)
    con = get_connection()
    try:
        return con._http_request(method="PUT", path="PermissionRules/" + role, body=body, **kwargs).read()
    except HTTPForbiddenError as e:
        e.msg = "You are not permitted to set this role's permissions."
        raise
    except HTTPResourceNotFoundError as e:
        e.msg = "Role does not exist."
        raise


def _get_permissions(role, **kwargs):
    """Return the set of PermissionRule objects of a role via
    ``GET PermissionRules/role``."""
    con = get_connection()
    try:
        return PermissionRule._parse_body(con._http_request(method="GET", path="PermissionRules/" + role, **kwargs).read())
    except HTTPForbiddenError as e:
        e.msg = "You are not permitted to retrieve this role's permissions."
        raise
    except HTTPResourceNotFoundError as e:
        e.msg = "Role does not exist."
        raise


class PermissionRule():
    """Permission rules.

Parameters
----------
action : str
    Either "grant" or "deny"

permission : str
    For example ``RETRIEVE:*``.

priority : bool, optional
    Whether the priority shall be set, defaults is False.
    """

    @staticmethod
    def _parse_boolean(bstr):
        """Return True for the server's textual true values, False otherwise."""
        return str(bstr) in ["True", "true", "TRUE", "yes"]

    def __init__(self, action, permission, priority=False):
        self._action = action
        self._permission = permission
        self._priority = PermissionRule._parse_boolean(priority)

    def _to_xml(self):
        """Serialize this rule to an XML element named after the action."""
        xml = etree.Element(self._action)
        xml.set("permission", self._permission)

        if self._priority is True:
            xml.set("priority", "true")

        return xml

    @staticmethod
    def _parse_element(elem):
        """Create a PermissionRule from a single XML element."""
        return PermissionRule(elem.tag, elem.get(
            "permission"), elem.get("priority"))

    @staticmethod
    def _parse_body(body):
        """Parse an XML response body into a set of PermissionRule objects."""
        xml = etree.fromstring(body)
        ret = set()

        for c in xml:
            if c.tag in ["Grant", "Deny"]:
                ret.add(PermissionRule._parse_element(c))

        return ret

    def __str__(self):
        return str(self._action) + "(" + str(self._permission) + ")" + \
            ("P" if self._priority is True else "")

    def __repr__(self):
        return str(self)

    def __hash__(self):
        # Hash and equality are based on the case-insensitive string form.
        return hash(str(self).lower())

    def __eq__(self, other):
        return str(other).lower() == str(self).lower()


# -*- coding: utf-8 -*-
#
# ** header v3.0
# This file is a part of the LinkAhead Project.
#
# Copyright (C) 2020 IndiScale GmbH
# Copyright (C) 2020 Henrik tom Wörden, IndiScale GmbH
# Copyright (C) 2020 Daniel Hornung (d.hornung@indiscale.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ** end header
#

"""Helpers for LinkAhead datatype strings (DOUBLE, TEXT, LIST<...>, ...)."""

import re

# Names of the base (non-reference) datatypes.
DOUBLE = "DOUBLE"
REFERENCE = "REFERENCE"
TEXT = "TEXT"
DATETIME = "DATETIME"
INTEGER = "INTEGER"
FILE = "FILE"
BOOLEAN = "BOOLEAN"


def LIST(datatype):
    """Return the LIST datatype string for the given element datatype.

    ``datatype`` may be a string or an object with a ``name`` attribute
    (e.g. a RecordType).
    """
    if hasattr(datatype, "name"):
        datatype = datatype.name

    return "LIST<" + str(datatype) + ">"


def get_list_datatype(datatype):
    """Return the datatype of the elements in the list, or None if
    ``datatype`` is not a LIST datatype string."""
    if not isinstance(datatype, str):
        return None
    # Also accept HTML-escaped angle brackets ("LIST&lt;...&gt;"), as they may
    # appear in XML/HTML transported datatype strings.
    match = re.match("LIST(<|&lt;)(?P<datatype>.*)(>|&gt;)", datatype)

    if match is not None:
        return match.group("datatype")
    else:
        return None


def is_list_datatype(datatype):
    """Return whether the datatype is a list."""

    return get_list_datatype(datatype) is not None


def is_reference(datatype):
    """Returns whether the value is a reference

    FILE and REFERENCE properties are examples, but also datatypes that are
    RecordTypes.

    Parameters
    ----------
    datatype : str
        The datatype to check.

    Returns
    -------
    bool
        True if the datatype is a not base datatype or a list of a base datatype.
        Otherwise False is returned.
    """

    if datatype is None:
        raise ValueError("Cannot decide whether datatype is reference if None"
                         " is supplied")

    if datatype in [DOUBLE, BOOLEAN, INTEGER, TEXT, DATETIME]:
        return False
    elif is_list_datatype(datatype):
        # A list is a reference iff its element datatype is.
        return is_reference(get_list_datatype(datatype))
    else:
        return True


def get_referenced_recordtype(datatype):
    """Return the record type of the referenced datatype.

    Raises
    ------
    ValueError
        In cases where datatype is not a reference, the list does not have
        a referenced record type or the datatype is a FILE.

    Parameters
    ----------
    datatype : str
        The datatype to check.

    Returns
    -------
    str
        String containing the name of the referenced datatype.
    """

    if not is_reference(datatype):
        raise ValueError("datatype must be a reference")

    if is_list_datatype(datatype):
        datatype = get_list_datatype(datatype)
        if datatype is None:
            raise ValueError("list does not have a list datatype")

    if datatype == FILE:
        raise ValueError(
            "FILE references are not considered references with a record type")

    return datatype


def get_id_of_datatype(datatype):
    """ returns the id of a Record Type

    This is not trivial, as queries may also return children. A check comparing
    names is necessary.

    Parameters
    ----------
    datatype : string
        A datatype, e.g. DOUBLE, or LIST<Person>

    Returns
    -------
    The id of the RecordType with the same name as the datatype.

    Raises
    ------
    QueryNotUniqueError
        If there are more than one entities with the same name as the datatype.
    EmptyUniqueQueryError
        If there is no entity with the name of the datatype.
    """

    # Deferred imports (consistent with each other) keep this module free of
    # import-time package dependencies and avoid import cycles.
    from .models import execute_query
    from ..exceptions import EmptyUniqueQueryError, QueryNotUniqueError
    if is_list_datatype(datatype):
        datatype = get_list_datatype(datatype)
    q = "FIND RECORDTYPE {}".format(datatype)

    # we cannot use unique=True here, because there might be subtypes
    res = execute_query(q)
    # Queries also return children, so an exact (case-insensitive) name match
    # is required.
    res = [el for el in res if el.name.lower() == datatype.lower()]

    if len(res) > 1:
        raise QueryNotUniqueError(
            "Name {} did not lead to unique result; Missing "
            "implementation".format(datatype))
    elif len(res) != 1:
        raise EmptyUniqueQueryError(
            "No RecordType named {}".format(datatype))

    return res[0].id


# -*- coding: utf-8 -*-
#
# This file is a part of the LinkAhead Project.
#
# Copyright (C) 2018 Research Group Biomedical Physics,
# Max-Planck-Institute for Dynamics and Self-Organization Göttingen
# Copyright (C) 2020-2023 Indiscale GmbH <info@indiscale.com>
# Copyright (C) 2020-2023 Florian Spreckelsen <f.spreckelsen@indiscale.com>
# Copyright (C) 2020-2022 Timm Fitschen <t.fitschen@indiscale.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
+# +# + +""" +Collection of the central classes of the LinkAhead client, namely the Entity class +and all of its subclasses and the Container class which is used to carry out +transactions. + +All additional classes are either important for the entities or the +transactions. +""" + +from __future__ import annotations # Can be removed with 3.10. +from __future__ import print_function, unicode_literals + +import re +import sys +from builtins import str +from copy import deepcopy +from functools import cmp_to_key +from hashlib import sha512 +from os import listdir +from os.path import isdir +from random import randint +from tempfile import NamedTemporaryFile +from typing import Any, Optional +from warnings import warn + +from lxml import etree + +from ..configuration import get_config +from ..connection.connection import get_connection +from ..connection.encode import MultipartParam, multipart_encode +from ..exceptions import (AmbiguousEntityError, AuthorizationError, + ConsistencyError, EmptyUniqueQueryError, + EntityDoesNotExistError, EntityError, + EntityHasNoDatatypeError, HTTPURITooLongError, + LinkAheadConnectionError, LinkAheadException, + MismatchingEntitiesError, PagingConsistencyError, + QueryNotUniqueError, TransactionError, + UniqueNamesError, UnqualifiedParentsError, + UnqualifiedPropertiesError) +from .datatype import (BOOLEAN, DATETIME, DOUBLE, INTEGER, TEXT, + is_list_datatype, is_reference) +from .state import State +from .timezone import TimeZone +from .utils import uuid, xml2str +from .versioning import Version + +_ENTITY_URI_SEGMENT = "Entity" + +# importances/inheritance +OBLIGATORY = "OBLIGATORY" +SUGGESTED = "SUGGESTED" +RECOMMENDED = "RECOMMENDED" +FIX = "FIX" +ALL = "ALL" +NONE = "NONE" + + +SPECIAL_ATTRIBUTES = ["name", "role", "datatype", "description", + "id", "path", "checksum", "size"] + + +class Entity: + + """Entity is a generic LinkAhead object. + + The majority of all methods of the derived classes (e.g. 
Record,
+    RecordType, Property ...) are defined here, e.g. add_property,
+    add_parent, retrieve ... Each entity may have some attributes (id,
+    name, description, ...), a set of properties, a set of parent
+    entities and a set of messages which are generated through the
+    processing in the client library or the server, or which can be used
+    by the user to control several server-side plug-ins.
+    """
+
+    def __init__(self, name=None, id=None, description=None,  # @ReservedAssignment
+                 datatype=None, value=None, **kwargs):
+        self.__role = kwargs["role"] if "role" in kwargs else None
+        self._checksum = None
+        self._size = None
+        self._upload = None
+        # If an entity is used (e.g. as parent), it is wrapped instead of being used directly.
+        # see Entity._wrap()
+        self._wrapped_entity = None
+        self._version = None
+        self._cuid = None
+        self._flags = dict()
+        self.__value = None
+        self.__datatype = None
+        self.datatype = datatype
+        self.value = value
+        self.messages = Messages()
+        self.properties = _Properties()
+        self.parents = _ParentList()
+        self.path = None
+        self.file = None
+        self.unit = None
+        self.acl = None
+        self.permissions = None
+        self.is_valid = lambda: False
+        self.is_deleted = lambda: False
+        self.name = name
+        self.description = description
+        self.id = id
+        self.state = None
+
+    def copy(self):
+        """
+        Return a copy of entity.
+
+        If deep == True return a deep copy, recursively copying all sub entities.
+
+        Standard properties are copied using add_property.
+        Special attributes, as defined by the global variable SPECIAL_ATTRIBUTES and additionally
+        the "value" are copied using setattr.
+ """ + if self.role == "File": + new = File() + elif self.role == "Property": + new = Property() + elif self.role == "RecordType": + new = RecordType() + elif self.role == "Record": + new = Record() + elif self.role == "Entity": + new = Entity() + else: + raise RuntimeError("Unkonwn role.") + + # Copy special attributes: + # TODO: this might rise an exception when copying + # special file attributes like checksum and size. + for attribute in SPECIAL_ATTRIBUTES + ["value"]: + val = getattr(self, attribute) + if val is not None: + setattr(new, attribute, val) + + # Copy parents: + for p in self.parents: + new.add_parent(p) + + # Copy properties: + for p in self.properties: + new.add_property(p, importance=self.get_importance(p)) + + return new + + @property + def version(self): + if self._version is not None or self._wrapped_entity is None: + return self._version + + return self._wrapped_entity.version + + @version.setter + def version(self, version): + self._version = version + + @property + def role(self): + return self.__role + + @role.setter + def role(self, role): + if role is not None and role.lower() == "entity": + self.__role = None + else: + self.__role = role + + @property + def size(self): + if self._size is not None: + return int(self._size) + + if self._wrapped_entity is None: + return None + + return self._wrapped_entity.size + + @property + def id(self): + if self.__id is not None: + return self.__id + + if self._wrapped_entity is None: + return None + + return self._wrapped_entity.id + + @id.setter + def id(self, new_id): + if new_id is not None: + self.__id = int(new_id) + else: + self.__id = None + + @property + def name(self): + if self.__name is not None or self._wrapped_entity is None: + return self.__name + + return self._wrapped_entity.name + + @name.setter + def name(self, new_name): + self.__name = new_name + + @property + def datatype(self): + if self.__datatype is not None or self._wrapped_entity is None: + return self.__datatype + + 
return self._wrapped_entity.datatype + + @datatype.setter + def datatype(self, new_type): + # re-parse value + self.__value = _parse_value(new_type, self.__value) + self.__datatype = new_type + + @property + def description(self): + if self.__description is not None or self._wrapped_entity is None: + return self.__description + + return self._wrapped_entity.description + + @property + def checksum(self): + return self._checksum + + @description.setter + def description(self, new_description): + self.__description = new_description + + @property + def unit(self): + if self.__unit is not None or self._wrapped_entity is None: + return self.__unit + + return self._wrapped_entity.unit + + @unit.setter + def unit(self, new_unit): + self.__unit = new_unit + + @property + def value(self): + if self.__value is not None or self._wrapped_entity is None: + return self.__value + + return self._wrapped_entity.value + + @value.setter + def value(self, new_value): + self.__value = _parse_value(self.datatype, new_value) + + @property + def path(self): + if self.__path is not None or self._wrapped_entity is None: + return self.__path + + return self._wrapped_entity.path + + @path.setter + def path(self, new_path): + self.__path = new_path + + @property + def thumbnail(self): + if self.__thumbnail is not None or self._wrapped_entity is None: + return self.__thumbnail + + return self._wrapped_entity.thumbnail + + @thumbnail.setter + def thumbnail(self, new_thumbnail): + self.__thumbnail = new_thumbnail + + @property + def file(self): + if self.__file is not None or self._wrapped_entity is None: + return self.__file + + return self._wrapped_entity.file + + @file.setter + def file(self, new_file): + self.__file = new_file + + @property + def pickup(self): + if self.__pickup is not None or self._wrapped_entity is None: + return self.__pickup + + return self._wrapped_entity.pickup + + @pickup.setter + def pickup(self, new_pickup): + self.__pickup = new_pickup + + def grant(self, 
realm=None, username=None, role=None, + permission=None, priority=False, revoke_denial=True): + """Grant a permission to a user or role for this entity. + + You must specify either only the username and the realm, or only the + role. + + By default a previously existing denial rule would be revoked, because + otherwise this grant wouldn't have any effect. However, for keeping + contradicting rules pass revoke_denial=False. + + Parameters + ---------- + permission: str + The permission to be granted. + username : str, optional + The username. Exactly one is required, either the `username` or the + `role`. + realm: str, optional + The user's realm. Required when username is not None. + role: str, optional + The role (as in Role-Based Access Control). Exactly one is + required, either the `username` or the `role`. + priority: bool, default False + Whether this permission is granted with priority over non-priority + rules. + revoke_denial: bool, default True + Whether a contradicting denial (with same priority flag) in this + ACL will be revoked. + """ + # @review Florian Spreckelsen 2022-03-17 + self.acl.grant(realm=realm, username=username, role=role, + permission=permission, priority=priority, + revoke_denial=revoke_denial) + + def deny(self, realm=None, username=None, role=None, + permission=None, priority=False, revoke_grant=True): + """Deny a permission to a user or role for this entity. + + You must specify either only the username and the realm, or only the + role. + + By default a previously existing grant rule would be revoked, because + otherwise this denial would override the grant rules anyways. However, + for keeping contradicting rules pass revoke_grant=False. + + Parameters + ---------- + permission: str + The permission to be denied. + username : str, optional + The username. Exactly one is required, either the `username` or the + `role`. + realm: str, optional + The user's realm. Required when username is not None. 
+ role: str, optional + The role (as in Role-Based Access Control). Exactly one is + required, either the `username` or the `role`. + priority: bool, default False + Whether this permission is denied with priority over non-priority + rules. + revoke_grant: bool, default True + Whether a contradicting grant (with same priority flag) in this + ACL will be revoked. + """ + # @review Florian Spreckelsen 2022-03-17 + self.acl.deny(realm=realm, username=username, role=role, + permission=permission, priority=priority, + revoke_grant=revoke_grant) + + def revoke_denial(self, realm=None, username=None, + role=None, permission=None, priority=False): + self.acl.revoke_denial( + realm=realm, + username=username, + role=role, + permission=permission, + priority=priority) + + def revoke_grant(self, realm=None, username=None, + role=None, permission=None, priority=False): + self.acl.revoke_grant( + realm=realm, + username=username, + role=role, + permission=permission, + priority=priority) + + def is_permitted(self, permission, role=None): + if role is None: + # pylint: disable=unsupported-membership-test + + return permission in self.permissions + else: + self.acl.is_permitted(permission=permission) + + def get_all_messages(self): + ret = Messages() + ret.append(self.messages) + + for p in self.properties: + ret.extend(p.get_all_messages()) + + for p in self.parents: + ret.extend(p.get_all_messages()) + + return ret + + def clear_server_messages(self): + self.messages.clear_server_messages() + + for p in self.properties: + p.clear_server_messages() + + for p in self.parents: + p.clear_server_messages() + + return self + + def get_importance(self, property): # @ReservedAssignment + """Get the importance of a given property regarding this entity.""" + + if self.properties is not None: + return self.properties.get_importance(property) + + def remove_property(self, property): # @ReservedAssignment + self.properties.remove(property) + + return self + + def 
remove_value_from_property(self, property_name: str, value: Any,
+                                   remove_if_empty_afterwards: Optional[bool] = True):
+        """Remove a value from a property given by name.
+
+        Do nothing if this entity does not have a property of this
+        ``property_name`` or if the property value is different of the given
+        ``value``. By default, the property is removed from this entity if it
+        becomes empty (i.e., value=None) through removal of the value. This
+        behavior can be changed by setting ``remove_if_empty_afterwards`` to
+        ``False`` in which case the property remains.
+
+        Notes
+        -----
+        If the property value is a list and the value to be removed occurs more
+        than once in this list, only its first occurrence is deleted (similar
+        to the behavior of Python's ``list.remove()``.)
+
+        If the property was empty (prop.value == None) before, the property is
+        not removed afterwards even if ``remove_if_empty_afterwards`` is set to
+        ``True``. Rationale: the property being empty is not an effect of
+        calling this function.
+
+        Parameters
+        ----------
+        property_name : str
+            Name of the property from which the ``value`` will be removed.
+
+        value
+            Value that is to be removed.
+
+        remove_if_empty_afterwards : bool, optional
+            Whether the property shall be removed from this entity if it is
+            emptied by removing the ``value``. Default is ``True``.
+
+        Returns
+        -------
+        self
+            This entity.
+ + """ + + if self.get_property(property_name) is None: + return self + if self.get_property(property_name).value is None: + remove_if_empty_afterwards = False + empty_afterwards = False + if isinstance(self.get_property(property_name).value, list): + if value in self.get_property(property_name).value: + self.get_property(property_name).value.remove(value) + if self.get_property(property_name).value == []: + self.get_property(property_name).value = None + empty_afterwards = True + elif self.get_property(property_name).value == value: + self.get_property(property_name).value = None + empty_afterwards = True + if remove_if_empty_afterwards and empty_afterwards: + self.remove_property(property_name) + + return self + + def remove_parent(self, parent): + self.parents.remove(parent) + + return self + + def add_property(self, property=None, value=None, id=None, name=None, description=None, datatype=None, + unit=None, importance=None, inheritance=None): # @ReservedAssignment + """Add a property to this entity. + + The first parameter is meant to identify the property entity either via + its id or name, or by providing the corresponding ``Entity`` Python + object. The second parameter is the value of the new property. Any other + named parameter may be passed by means of the keywwords. Accepted + keywords are: id, name, description, importance, inheritance, datatype, + and unit. + + Notes + ----- + If you want to add a property to an already existing entity, the + property ``id`` of that property needs to be specified before you send + the updated entity to the server. + + Parameters + ---------- + property : int, str, Entity, optional + An identifier for the property to be added, either its name, its id, + or the corresponding Entity Python object. If ``None``, either the + `name` or the `id` argument have to be specified explicitly. Default + is ``None``. + value : int, str, bool, datetime, Entity, or list of these types, optional + The value of the new property. 
In case of a reference to another + entity, this value may be the referenced entities id or the + ``Entity`` as a Python object. Default is None. + id : int, optional + Id of the property, by default None + name : str, optional + Name of the property, by default None + description : str, optional + Description of the property, by default None + datatype : str, optional + Datatype of the property, by default None + unit : str, optional + Unit of the property, by default None + importance :str, optional + Importance of the property, by default None + inheritance : str, optional + Inheritance of the property, by default None + + Returns + ------- + Entity + This Entity object to which the new property has been added. + + Warns + ----- + UserWarning + If the first parameter is None then id or name must be defined and not be None. + UserWarning + If the first parameter is an integer then it is interpreted as the id and id must be + undefined or None. + UserWarning + If the first parameter is not None and neither an instance of Entity nor an integer it is + interpreted as the name and name must be undefined or None. + + Raises + ------ + ValueError: + If you try to add an ``Entity`` object with File or Record role (or, + equivalently, a ``File`` or ``Record`` object) as a property, a + ``ValueError`` is raised. 
+ + Examples + -------- + Add a simple integer property with the name ``TestProp`` and the value + 27 to a Record: + + >>> import linkahead as db + >>> rec = db.Record(name="TestRec").add_parent(name="TestType") + >>> rec.add_property("TestProp", value=27) # specified by name, you could equally use the property's id if it is known + + You can also use the Python object: + + >>> prop = db.Property(name="TestProp", datatype=db.INTEGER) + >>> rec.add_property(prop, value=27) # specified via the Python object + + In case of updating an existing Record, the Property needs to be + specified by id: + + >>> rec = db.Record(name="TestRec").retrieve() + >>> prop2 = db.Property(name="OtherTestProp").retrieve() + >>> rec.add_property(id=prop2.id, value="My new value") + >>> rec.update() + + Let's look at the more advanced example of adding a list of integers as + value of the above integer ``TestProp``: + + >>> rec.add_property("TestProp", value=[27,28,29], datatype=db.LIST(db.INTEGER)) + + Note that since `TestProp` is a scalar integer Property, the datatype + `LIST<INTEGER>` has to be specified explicitly. + + Finally, we can also add reference properties, specified by the RecordType of the referenced entity. + + >>> ref_rec = db.Record(name="ReferencedRecord").add_parent(name="OtherRT") + >>> rec.add_property(name="OtherRT", value=ref_rec) # or value=ref_rec.id if ref_rec has one set by the server + + See more on adding properties and inserting data in + https://docs.indiscale.com/caosdb-pylib/tutorials/Data-Insertion.html. + + """ + + pid = id + abstract_property = None + + if isinstance(property, Entity): + if property.role is not None and property.role.lower() in ["record", "file"]: + raise ValueError("The property parameter is a {0}. This " + "is very unusual and probably not what you " + "want. 
Otherwise, construct a property from " + "a {0} using the Property class and add " + "that to this entity.".format(property.role)) + abstract_property = property + elif isinstance(property, int): + if pid is not None: + raise UserWarning("The first parameter was an integer which would normally be interpreted as the id of the property which is to be added. But you have also specified a parameter 'id' in the method call. This is ambiguous and cannot be processed.") + pid = property + id = pid + elif property is not None: + if name is not None: + raise UserWarning("The first parameter was neither an instance of Entity nor an integer. Therefore the string representation of your first parameter would normally be interpreted name of the property which is to be added. But you have also specified a parameter 'name' in the method call. This is ambiguous and cannot be processed.") + name = str(property) + + if property is None and name is None and pid is None: + raise UserWarning( + "This method expects you to pass at least an entity, a name or an id.") + + new_property = Property(name=name, id=id, description=description, datatype=datatype, + value=value, unit=unit) + + if abstract_property is not None: + new_property._wrap(abstract_property) + + # FIXME: this really necessary? + + if new_property.datatype is None and isinstance( + property, (RecordType, Record, File)): + new_property.datatype = property + new_property.value = value + + self.properties.append( + property=new_property, importance=importance, inheritance=inheritance) + + return self + + def add_message(self, msg=None, type=None, code=None, # @ReservedAssignment + description=None, body=None): + """Add a message (msg) to this entity. If and only if no msg is given + this method will created a new message from the parameters type, code, + description, and body. + + @param msg: The message to be added to this entity. + @param type: The type of the message to be added. 
+        @param code: The code of the message to be added.
+        @param description: The description of the message to be added.
+        @param body: The body of the message to be added.
+        """
+
+        if msg is not None:
+            pass
+        else:
+            msg = Message(description=description, type=type, code=code,
+                          body=body)
+        self.messages.append(msg)
+
+        return self
+
+    def add_parent(self, parent=None, id=None, name=None, inheritance=None):  # @ReservedAssignment
+        """Add a parent to this entity.
+
+        Parameters
+        ----------
+        parent : Entity or int or str or None
+            The parent entity, either specified by the Entity object
+            itself, or its id or its name. Default is None.
+        id : int
+            Integer id of the parent entity. Ignored if `parent`
+            is not None.
+        name : str
+            Name of the parent entity. Ignored if `parent` is not
+            None.
+        inheritance : str
+            One of ``obligatory``, ``recommended``, ``suggested``, or ``fix``. Specifies the
+            minimum importance which parent properties need to have to be inherited by this
+            entity. If no `inheritance` is given, no properties will be inherited by the child.
+            This parameter is case-insensitive.
+
+        Notes
+        -----
+        Note that the behaviour of the `inheritance` argument currently has not
+        yet been specified when assigning parents to Records, it only works for
+        inheritance of RecordTypes (and Properties). For more information, it is
+        recommended to look into the :ref:`data insertion
+        tutorial<tutorial-inheritance-properties>`.
+
+        Raises
+        ------
+        UserWarning
+            If neither a `parent` parameter, nor the `id`, nor `name`
+            parameter is passed to this method.
+ + """ + + pid = id + parent_entity = None + + if isinstance(parent, Entity): + parent_entity = parent + elif isinstance(parent, int): + pid = parent + elif parent is not None: + name = str(parent) + + if pid is None and name is None and parent_entity is None: + raise UserWarning( + "This method expects you to pass at least an entity, a name or an id.") + + addp = Parent(id=pid, name=name, inheritance=inheritance) + + if parent_entity is not None: + addp._wrap(parent_entity) + self.parents.append(addp) + + return self + + def has_parent(self, parent: Entity, recursive: bool = True, retrieve: bool = True, + check_name: bool = True, check_id: bool = False): + """Check if this entity has a given parent. + + If 'check_name' and 'check_id' are both False, test for identity + on the Python level. Otherwise use the name and/or ID for the + check. Note that, if checked, name or ID should not be None, + lest the check fail. + +Parameters +---------- + +parent: Entity + Check for this parent. + +recursive: bool, optional + Whether to check recursively. + +check_name: bool, optional + Whether to use the name for ancestry check. + +check_id: bool, optional + Whether to use the ID for ancestry check. + +retrieve: bool, optional + If False, do not retrieve parents from the server. + +Returns +------- +out: bool + True if ``parent`` is a true parent, False otherwise. 
+""" + + if recursive: + parents = self.get_parents_recursively(retrieve=retrieve) + else: + if retrieve: + parents = [pp.retrieve()._wrapped_entity for pp in self.parents] + else: + parents = [pp._wrapped_entity for pp in self.parents] + + if not (check_name or check_id): + return parent in parents + + name_result = ( + not check_name or + (parent.name is not None and + parent.name in [pp.name for pp in parents])) + id_result = ( + not check_id or + (parent.id is not None and + parent.id in [pp.id for pp in parents])) + + return name_result and id_result + + def get_parents(self): + """Get all parents of this entity. + + @return: _ParentList(list) + """ + + return self.parents + + def get_parents_recursively(self, retrieve: bool = True): + """Get all ancestors of this entity. + +Parameters +---------- + +retrieve: bool, optional + If False, do not retrieve parents from the server. + +Returns +------- +out: List[Entity] + The parents of this Entity +""" + + all_parents = [] + self._get_parent_recursively(all_parents, retrieve=retrieve) + + return all_parents + + def _get_parent_recursively(self, all_parents: list, retrieve: bool = True): + """Get all ancestors with a little helper. + + As a side effect of this method, the ancestors are added to + all_parents. + + @param all_parents: list, The added parents so far. + + @return: None, but see side effects. + """ + + for parent in self.parents: + # TODO: + # Comment on _wrap and _wrapped_entity + # Currently, I (henrik) do not why the wrapping is necessary (and it is not + # documented). However, the following illustrates, why I think, it is a bad idea. + # First you add a parent with rec.add_parent(parent), but then you cannot access + # attributes of parent when you use rec.parents[0] for example becasue you do not get + # the same object but a wrapping object and you need to know that you only get the + # original by accessing the private (!) _wrapped_entity object. 
+ w_parent = parent._wrapped_entity + if retrieve: + parent.retrieve() + for next_parent in parent.parents: + w_parent.add_parent(next_parent) + + if (w_parent.id, w_parent.name) not in [ + (all_p.id, all_p.name) for all_p in all_parents]: + all_parents.append(w_parent) + w_parent._get_parent_recursively(all_parents, retrieve=retrieve) + + def get_parent(self, key): + """Return the first parent matching the key or None if no match exists. + + Parameters + --------- + key : int or Enity or str + The id, Entity, or name of the parent that should be + returned. If an Entity is given, its id or its name is + used to find a matching parent. + + Returns + ------- + parent : Entity + The first parent of this entity that matches the given id, + entity, or name. + + """ + + if isinstance(key, int): + for p in self.parents: + if p.id is not None and int(p.id) == int(key): + return p + elif isinstance(key, Entity): + if key.id is not None: + # first try by id + found = self.get_parent(int(key.id)) + + if found is not None: + return found + # otherwise by name + + return self.get_parent(key.name) + else: + for p in self.parents: + if (p.name is not None + and str(p.name).lower() == str(key).lower()): + + return p + + return None + + def get_properties(self): + """Get all properties of this entity. + + @return: _Properties(list) + """ + + return self.properties + + def get_property(self, pattern): + """ Return the first matching property or None. + + Parameters + ---------- + pattern : str or int or Entity + The name or id to look for (case-insensitive) or an Entity where + the name or id is used to match the properites of this instance. + + Returns + ------- + property : Property + The first Property of this Entity with a matching name or id. 
+ + """ + # entity given + + if (hasattr(pattern, "name") or hasattr(pattern, "id")): + # only return if a result was found, otherwise use id + + if (hasattr(pattern, "name") and pattern.name is not None + and self.get_property(pattern.name) is not None): + + return self.get_property(pattern.name) + + if hasattr(pattern, "id") and pattern.id is not None: + return self.get_property(pattern.id) + + # int given + elif isinstance(pattern, int): + for p in self.properties: + if p.id is not None and int(p.id) == int(pattern): + return p + # str given + elif isinstance(pattern, str): + for p in self.properties: + if (p.name is not None + and str(p.name).lower() == str(pattern).lower()): + + return p + else: + raise ValueError("argument should be entity, int , string") + + return None + + def _get_value_for_selector(self, selector): + """return the value described by the selector + + A selector is a list or a tuple of strings describing a path in an + entity tree with self as root. The last selector may be a special one + like unit or name. 
+ + See also get_property_values() + """ + SPECIAL_SELECTORS = ["unit", "value", "description", "id", "name"] + + if not isinstance(selector, (tuple, list)): + selector = [selector] + + ref = self + + # there are some special selectors which can be applied to the + # final element; if such a special selector exists we split it + # from the list + + if selector[-1].lower() in SPECIAL_SELECTORS: + special_selector = selector[-1] + selector = selector[:-1] + else: + special_selector = None + + # iterating through the entity tree according to the selector + + for subselector in selector: + # selector does not match the structure, we cannot get a + # property of non-entity + + if not isinstance(ref, Entity): + return None + + prop = ref.get_property(subselector) + + # selector does not match the structure, we did not get a + # property + + if prop is None: + return None + + # if the property is a reference, we are interested in the + # corresponding entities attributes + + if isinstance(prop.value, Entity): + ref = prop.value + + # otherwise in the attributes of the property + else: + ref = prop + + # if we saved a special selector before, apply it + + if special_selector is None: + return prop.value + else: + return getattr(ref, special_selector.lower()) + + def get_property_values(self, *selectors): + """ Return a tuple with the values described by the given selectors. + + This represents an entity's properties as if it was a row of a table + with the given columns. + + If the elements of the selectors parameter are tuples, they will return + the properties of the referenced entity, if present. E.g. ("window", + "height") will return the value of the height property of the + referenced window entity. + + The tuple's values correspond to the order of selectors parameter. + + The tuple contains None for all values that are not available in the + entity. That does not necessarily mean, that the values are not stored + in the database (e.g. 
if a single entity was retrieved without + referenced entities). + + Parameters + ---------- + *selectors : str or tuple of str + Each selector is a list or tuple of property names, e.g. `"height", + "width"`. + + Returns + ------- + row : tuple + A row-like representation of the entity's properties. + """ + row = tuple() + + for selector in selectors: + val = self._get_value_for_selector(selector) + + if isinstance(val, Entity): + val = val.id if val.id is not None else val.name + row += (val,) + + return row + + def get_messages(self): + """Get all messages of this entity. + + @return: Messages(list) + """ + + return self.messages + + def get_warnings(self): + """Get all warning messages of this entity. + + @return Messages(list): Warning messages. + """ + ret = Messages() + + for m in self.messages: + if m.type.lower() == "warning": + ret.append(m) + + return ret + + def get_errors(self): + """Get all error messages of this entity. + + @return Messages(list): Error messages. + """ + ret = Messages() + + for m in self.messages: + if m.type.lower() == "error": + ret.append(m) + + if self._wrapped_entity is not None: + ret.extend(self._wrapped_entity.get_errors()) + + return ret + + def get_errors_deep(self, roots=None): + """Get all error messages of this entity and all sub-entities / + parents / properties. + + @return A list of tuples. Tuple index 0 contains the error message + and tuple index 1 contains the tree. + """ + roots = [] if roots is None else roots + result_list = list() + ret_self = self.get_errors() + result_list.extend([ + (m, roots) for m in ret_self]) + + for parent in self.get_parents(): + result_list.extend( + parent.get_errors_deep( + roots + [parent])) + + return result_list + + def has_errors(self): + ''' + @return True: if and only if this entities has any error messages. 
+ ''' + + for m in self.messages: + if m.type.lower() == "error": + return True + + return False + + def to_xml(self, xml=None, add_properties=ALL, local_serialization=False): + """Generate an xml representation of this entity. If the parameter xml + is given, all attributes, parents, properties, and messages of this + entity will be added to it instead of creating a new element. + + Raise an error if xml is not a lxml.etree.Element + + @param xml: an xml element to which all attributes, parents, + properties, and messages + are to be added. + @return: xml representation of this entity. + """ + + if xml is None: + # use role as xml tag name, fall-back to "Entity" + elem_tag = "Entity" if self.role is None else self.role + xml = etree.Element(elem_tag) + assert isinstance(xml, etree._Element) + + # unwrap wrapped entity + + if self._wrapped_entity is not None: + xml = self._wrapped_entity.to_xml(xml, add_properties) + + if self.id is not None: + xml.set("id", str(self.id)) + + if self._cuid is not None: + xml.set("cuid", str(self._cuid)) + + if self.name is not None: + xml.set("name", str(self.name)) + + if self.description is not None: + xml.set("description", str(self.description)) + + if self.version is not None: + xml.append(self.version.to_xml()) + + if self.value is not None: + if isinstance(self.value, Entity): + if self.value.id is not None: + xml.text = str(self.value.id) + elif self.value.name is not None: + xml.text = str(self.value.name) + else: + xml.text = str(self.value) + elif isinstance(self.value, list): + for v in self.value: + v_elem = etree.Element("Value") + + if isinstance(v, Entity): + if v.id is not None: + v_elem.text = str(v.id) + elif v.name is not None: + v_elem.text = str(v.name) + else: + v_elem.text = str(v) + elif v == "": + v_elem.append(etree.Element("EmptyString")) + elif v is None: + pass + else: + v_elem.text = str(v) + xml.append(v_elem) + elif self.value == "": + xml.append(etree.Element("EmptyString")) + elif str(self.value) 
== "nan": + xml.text = "NaN" + else: + xml.text = str(self.value) + + if self.datatype is not None: + if isinstance(self.datatype, Entity): + if self.datatype.id is not None: + xml.set("datatype", str(self.datatype.id)) + elif self.datatype.name is not None: + xml.set("datatype", str(self.datatype.name)) + else: + xml.set("datatype", str(self.datatype)) + else: + xml.set("datatype", str(self.datatype)) + + if self.path is not None: + xml.set("path", self.path) + + if self.file is not None and local_serialization: + xml.set("file", self.file) + + if self._checksum is not None: + xml.set("checksum", self._checksum) + + if self.size is not None: + xml.set("size", str(self.size)) + + if self.unit is not None: + xml.set("unit", str(self.unit)) + + if self.messages is not None: + self.messages.to_xml(xml) + + if self.parents is not None: + self.parents.to_xml(xml) + + if self.properties is not None: + self.properties.to_xml(xml, add_properties) + + if len(self._flags) > 0: + flagattr = "" + + for key in self._flags.keys(): + flag = self._flags[key] + + if flag is not None and flag != "": + flagattr += str(key) + ":" + str(flag) + "," + else: + flagattr += str(key) + "," + xml.set("flag", flagattr) + + if self.acl is not None: + xml.append(self.acl.to_xml()) + + if self.state is not None: + xml.append(self.state.to_xml()) + + return xml + + @staticmethod + def _from_xml(entity, elem): + """Parse a single string representation of an xml element to an entity. 
+ + @param entity: the entity + @param elem: the xml element + """ + + if isinstance(entity, Entity): + entity.role = elem.tag + entity._cuid = elem.get("cuid") + entity.id = elem.get("id") # @ReservedAssignment + entity.name = elem.get("name") + entity.description = elem.get("description") + entity.path = elem.get("path") + entity._checksum = elem.get("checksum") + entity._size = elem.get("size") + entity.datatype = elem.get("datatype") # @ReservedAssignment + entity.unit = elem.get("unit") + entity.file = elem.get("file") + + if hasattr(entity, "affiliation"): + entity.affiliation = elem.get("affiliation") + + vals = list() + + for celem in elem: + + child = _parse_single_xml_element(celem) + + if isinstance(child, Property): + entity.properties.append(property=child, + importance=celem.get("importance"), + inheritance=None) + elif isinstance(child, Parent): + entity.add_parent(child) + elif isinstance(child, ACL): + entity.acl = child + elif isinstance(child, Permissions): + entity.permissions = child + elif isinstance(child, Message): + entity.add_message(child) + elif isinstance(child, Version): + entity.version = child + elif isinstance(child, State): + entity.state = child + elif child is None or hasattr(child, "encode"): + vals.append(child) + elif isinstance(child, Entity): + vals.append(child) + else: + raise TypeError( + 'Child was neither a Property, nor a Parent, nor a Message.\ + Was ' + str(type(child)) + "\n" + str(child)) + + # add VALUE + value = None + + if vals: + # The value[s] have been inside a <Value> tag. + value = vals + elif elem.text is not None and elem.text.strip() != "": + value = elem.text.strip() + + try: + entity.value = value + except ValueError: + # circumvent the parsing. 
+ entity.__value = value + + return entity + + def __repr__(self): + return xml2str(self.to_xml()) + + def retrieve_acl(self): + self.acl = Entity(name=self.name, id=self.id).retrieve( + flags={"ACL": None}).acl + + def update_acl(self): + if self.id is None: + c = Container().retrieve(query=self.name, sync=False) + + if len(c) == 1: + e = c[0] + elif len(c) == 0: + ee = EntityDoesNotExistError( + "The entity to be updated does not exist on the server.", + entity=self + ) + raise TransactionError(ee) + else: + ae = AmbiguousEntityError( + "Could not determine the desired Entity which is to be updated by its name.", + entity=self + ) + raise TransactionError(ae) + else: + e = Container().retrieve(query=self.id, sync=False)[0] + e.acl = ACL(self.acl.to_xml()) + e.update() + + return e + + def delete(self, raise_exception_on_error=True): + return Container().append(self).delete( + raise_exception_on_error=raise_exception_on_error)[0] + + def retrieve(self, unique=True, raise_exception_on_error=True, flags=None): + """Retrieve this entity identified via its id if present and via its + name otherwise. Any locally already existing attributes (name, + description, ...) will be preserved. Any such properties and parents + will be synchronized as well. They will not be overridden. This method + returns a Container containing this entity. + + Note: If only a name is given this could lead to ambiguities. Usually + this would raise a LinkAheadException. Set the flag 'unique' to False if + this Exception should be suppressed. If unique is False this method + returns a Container object which carries the returned entities. They are + distinct from this one. This entity will not be changed in any way. + + @param unique=True: flag to suppress the ambiguity exception. + + @return + Container with the returned entities or single entity if and only + if unique was True and no exception was raised. 
+ + """ + + if unique: + c = Container().append(self).retrieve( + unique=unique, raise_exception_on_error=raise_exception_on_error, flags=flags) + + if len(c) == 1: + c[0].messages.extend(c.messages) + + return c[0] + + raise QueryNotUniqueError("This retrieval was not unique!!!") + + return Container().append(self).retrieve( + unique=unique, raise_exception_on_error=raise_exception_on_error, flags=flags) + + def insert(self, raise_exception_on_error=True, unique=True, + sync=True, strict=False, flags=None): + """Insert this entity into a LinkAhead server. A successful insertion will + generate a new persistent ID for this entity. This entity can be + identified, retrieved, updated, and deleted via this ID until it has + been deleted. + + If the insertion fails, a LinkAheadException will be raised. The server will have returned at + least one error-message describing the reason why it failed in that case (call + <this_entity>.get_all_messages() in order to get these error-messages). + + Some insertions might cause warning-messages on the server-side, but the entities are inserted + anyway. Set the flag 'strict' to True in order to force the server to take all warnings as errors. + This prevents the server from inserting this entity if any warning occurs. + + Parameters + ---------- + strict : bool, optional + Flag for strict mode. Default is False. + raise_exception_on_error : bool, optional + Flag to raise an exception when an error occurs. Default is True. + unique : bool, optional + Flag to only allow insertion of elements with unique names. Default + is True. + flags : dict, optional + A dictionary of flags to be sent with the insertion. Default is + None. + + """ + + return Container().append(self).insert( + strict=strict, + raise_exception_on_error=raise_exception_on_error, + unique=unique, + sync=sync, + flags=flags)[0] + + def update(self, strict=False, raise_exception_on_error=True, + unique=True, flags=None, sync=True): + """Update this entity. 
+ +There are two possible work-flows to perform this update: +First: + 1) retrieve an entity + 2) do changes + 3) call update method + +Second: + 1) construct entity with id + 2) call update method. + + For slight changes, the second one is more convenient. Furthermore, it is possible to stay + off-line until calling the update method. The name, description, unit, datatype, path, + and value of an entity may be changed. Additionally, properties, parents and messages may be added. + + However, the first one is more powerful: It is possible to delete and change properties, parents + and attributes, which is not possible via the second one for internal reasons (which are reasons + of definiteness). + + If the update fails, a LinkAheadException will be raised. The server will have returned at + least one error message describing the reason why it failed in that case (call + <this_entity>.get_all_messages() in order to get these error-messages). + + Some updates might cause warning messages on the server-side, but the updates are performed + anyway. Set flag 'strict' to True in order to force the server to take all warnings as errors. + This prevents the server from updating this entity if any warnings occur. + + @param strict=False: Flag for strict mode. + """ + + return Container().append(self).update( + strict=strict, + sync=sync, + raise_exception_on_error=raise_exception_on_error, + unique=unique, + flags=flags)[0] + + def _wrap(self, entity): + """ + When entity shall be used as parent or property it is not added to the corresponding list + (such as the parent list) directly, but another Entity object is created and the original + Entity is wrapped using this function + TODO: document here and in dev docs why this is done. 
+ """ + self._wrapped_entity = entity + + return self + + def set_flag(self, key, value=None): + self._flags[key] = value + + return self + + +def _parse_value(datatype, value): + """Parse the value (from XML input) according to the given datatype + """ + + # Simple values + if value is None: + return value + + if datatype is None: + return value + + if datatype == DOUBLE: + return float(value) + + if datatype == INTEGER: + return int(str(value)) + + if datatype == BOOLEAN: + if str(value).lower() == "true": + return True + elif str(value).lower() == "false": + return False + else: + raise ValueError("Boolean value was {}.".format(value)) + + # Datetime and text are returned as-is + if datatype in [DATETIME, TEXT]: + if isinstance(value, str): + return value + + # deal with collections + if isinstance(datatype, str): + matcher = re.compile(r"^(?P<col>[^<]+)<(?P<dt>[^>]+)>$") + m = matcher.match(datatype) + + if m: + col = m.group("col") + dt = m.group("dt") + + if col == "LIST": + ret = list() + else: + return value + + if hasattr(value, "__iter__") and not isinstance(value, str): + for v in value: + ret.append(_parse_value(dt, v)) + else: + # put a single value into a list since the datatype says so. + ret.append(_parse_value(dt, value)) + + return ret + + # This is for a special case, where the xml parser could not differentiate + # between single values and lists with one element. 
As + if hasattr(value, "__len__") and len(value) == 1: + return _parse_value(datatype, value[0]) + + # deal with references + if isinstance(value, Entity): + return value + + if isinstance(value, str) and "@" in value: + # probably this is a versioned reference + + return str(value) + else: + # for unversioned references + try: + return int(value) + except ValueError: + # reference via name + + return str(value) + except TypeError as te: + # deal with invalid XML: List of values without appropriate datatype + if isinstance(value, list): + raise TypeError( + "Invalid datatype: List valued properties must be announced by " + "the datatype.\n" + f"Datatype: {datatype}\nvalue: {value}") + else: + # Everything else that's not related to wrong list assignments + raise te + + +def _log_request(request, xml_body=None): + if Container._debug() > 0: + print("\n" + request) + + if xml_body is not None: + print("======== Request body ========\n") + print(xml2str(xml_body)) + print("\n==============================\n") + + +def _log_response(body): + if Container._debug() > 0: + print("\n======== Response body ========\n") + print(body.decode()) + print("\n===============================\n") + + +class QueryTemplate(): + + def __init__(self, id=None, name=None, query=None, description=None): # @ReservedAssignment + + self.id = (int(id) if id is not None else None) + self.role = "QueryTemplate" + self.name = name + self.description = description + self.query = query + self._cuid = None + self.value = None + self.datatype = None + self.messages = Messages() + self.properties = None + self.parents = None + self.path = None + self.file = None + self._checksum = None + self._size = None + self._upload = None + self.unit = None + self.acl = None + self.permissions = None + self.is_valid = lambda: False + self.is_deleted = lambda: False + self.version = None + self.state = None + + def retrieve(self, raise_exception_on_error=True, unique=True, sync=True, + flags=None): + + return 
Container().append(self).retrieve( + raise_exception_on_error=raise_exception_on_error, + unique=unique, + sync=sync, + flags=flags)[0] + + def insert(self, strict=True, raise_exception_on_error=True, + unique=True, sync=True, flags=None): + + return Container().append(self).insert( + strict=strict, + raise_exception_on_error=raise_exception_on_error, + unique=unique, + sync=sync, + flags=flags)[0] + + def update(self, strict=True, raise_exception_on_error=True, + unique=True, sync=True, flags=None): + + return Container().append(self).update( + strict=strict, + raise_exception_on_error=raise_exception_on_error, + unique=unique, + sync=sync, + flags=flags)[0] + + def delete(self, raise_exception_on_error=True): + return Container().append(self).delete( + raise_exception_on_error=raise_exception_on_error)[0] + + def __repr__(self): + return xml2str(self.to_xml()) + + def to_xml(self, xml=None): + if xml is None: + xml = etree.Element("QueryTemplate") + + if self.name is not None: + xml.set("name", self.name) + + if self.id is not None: + xml.set("id", str(self.id)) + + if self.description is not None: + xml.set("description", self.description) + + if self.version is not None: + xml.append(self.version.to_xml()) + + if self.query is not None: + queryElem = etree.Element("Query") + queryElem.text = self.query + xml.append(queryElem) + + if self.messages is not None: + self.messages.to_xml(xml) + + if self.acl is not None: + xml.append(self.acl.to_xml()) + + return xml + + @staticmethod + def _from_xml(xml): + if xml.tag.lower() == "querytemplate": + q = QueryTemplate(name=xml.get("name"), + description=xml.get("description"), query=None) + + for e in xml: + if e.tag.lower() == "query": + q.query = e.text + else: + child = _parse_single_xml_element(e) + + if isinstance(child, Message): + q.messages.append(child) + elif isinstance(child, ACL): + q.acl = child + elif isinstance(child, Version): + q.version = child + elif isinstance(child, Permissions): + q.permissions = 
child + q.id = int(xml.get("id")) + + return q + else: + return None + + def clear_server_messages(self): + self.messages.clear_server_messages() + + def get_parents(self): + return [] + + def get_properties(self): + return [] + + def has_id(self): + return self.id is not None + + def get_errors(self): + ret = Messages() + + for m in self.messages: + if m.type.lower() == "error": + ret.append(m) + + return ret + + def get_messages(self): + return self.messages + + def has_errors(self): + return len(self.get_errors()) > 0 + + +class Parent(Entity): + """The parent entities.""" + + @property + def affiliation(self): + if self.__affiliation is not None or self._wrapped_entity is None: + return self.__affiliation + elif hasattr(self._wrapped_entity, "affiliation"): + return self._wrapped_entity.affiliation + + return + + @affiliation.setter + def affiliation(self, affiliation): + self.__affiliation = affiliation + + def __init__(self, id=None, name=None, description=None, inheritance=None): # @ReservedAssignment + Entity.__init__(self, id=id, name=name, description=description) + + if inheritance is not None: + self.set_flag("inheritance", inheritance) + self.__affiliation = None + + def to_xml(self, xml=None, add_properties=None): + if xml is None: + xml = etree.Element("Parent") + + return super().to_xml(xml=xml, add_properties=add_properties) + + +class _EntityWrapper(object): + pass + + +class _ConcreteProperty(_EntityWrapper): + pass + + +class Property(Entity): + + """LinkAhead's Property object.""" + + def add_property(self, property=None, value=None, id=None, name=None, description=None, datatype=None, + unit=None, importance=FIX, inheritance=FIX): # @ReservedAssignment + """See ``Entity.add_property``.""" + + return super().add_property( + property=property, id=id, name=name, description=description, datatype=datatype, + value=value, unit=unit, importance=importance, inheritance=inheritance) + + def add_parent(self, parent=None, id=None, name=None, 
inheritance=FIX): + """Add a parent Entity to this Property. + + Parameters + ---------- + Parameters + ---------- + parent : Entity or int or str or None + The parent entity, either specified by the Entity object + itself, or its id or its name. Default is None. + id : int + Integer id of the parent entity. Ignored if `parent` + is not None. + name : str + Name of the parent entity. Ignored if `parent is not + none`. + inheritance : str, default: FIX + One of ``obligatory``, ``recommended``, ``suggested``, or ``fix``. Specifies the + minimum importance which parent properties need to have to be inherited by this + entity. If no `inheritance` is given, no properties will be inherited by the child. + This parameter is case-insensitive. + + See Also + -------- + Entity.add_parent + + """ + + return super(Property, self).add_parent(parent=parent, id=id, name=name, inheritance=inheritance) + + def __init__(self, name=None, id=None, description=None, datatype=None, + value=None, unit=None): + Entity.__init__(self, id=id, name=name, description=description, + datatype=datatype, value=value, role="Property") + self.unit = unit + + def to_xml(self, xml=None, add_properties=ALL): + if xml is None: + xml = etree.Element("Property") + + return super(Property, self).to_xml(xml, add_properties) + + def is_reference(self, server_retrieval=False): + """Returns whether this Property is a reference + + Parameters + ---------- + server_retrieval : bool, optional + If True and the datatype is not set, the Property is retrieved from the server, by default False + + Returns + ------- + bool, NoneType + Returns whether this Property is a reference or None if a server call is needed to + check correctly, but server_retrieval is set to False. 
+ + """ + + if self.datatype is None: + + if not self.is_valid(): + # this is a workaround to prevent side effects + # since retrieve currently changes the object + + if server_retrieval: + tmp_prop = deepcopy(self) + """ + remove role to avoid unnessecary ValueError while + retrieving the Entity. + """ + tmp_prop.role = None + tmp_prop.retrieve() + + return tmp_prop.is_reference() + else: + return None + else: + # a valid property without datatype has to be an RT + + return True + else: + return is_reference(self.datatype) + + +class Message(object): + + def __init__(self, type=None, code=None, description=None, body=None): # @ReservedAssignment + self.description = description + self.type = type if type is not None else "Info" + self.code = int(code) if code is not None else None + self.body = body + + def to_xml(self, xml=None): + if xml is None: + xml = etree.Element(str(self.type)) + + if self.code is not None: + xml.set("code", str(self.code)) + + if self.description: + xml.set("description", str(self.description)) + + if self.body: + xml.text = str(self.body) + + return xml + + def __repr__(self): + return xml2str(self.to_xml()) + + def __eq__(self, obj): + if isinstance(obj, Message): + return self.type == obj.type and self.code == obj.code and self.description == obj.description + + return False + + def get_code(self): + warn(("get_code is deprecated and will be removed in future. 
" + "Use self.code instead."), DeprecationWarning) + return int(self.code) + + +class RecordType(Entity): + + """This class represents LinkAhead's RecordType entities.""" + + def add_property(self, property=None, value=None, id=None, name=None, description=None, datatype=None, + unit=None, importance=RECOMMENDED, inheritance=FIX): # @ReservedAssignment + """See ``Entity.add_property``.""" + + return super().add_property( + property=property, id=id, name=name, description=description, datatype=datatype, + value=value, unit=unit, importance=importance, inheritance=inheritance) + + def add_parent(self, parent=None, id=None, name=None, inheritance=OBLIGATORY): + """Add a parent to this RecordType + + Parameters + ---------- + parent : Entity or int or str or None, optional + The parent entity, either specified by the Entity object + itself, or its id or its name. Default is None. + Parameters + ---------- + parent : Entity or int or str or None + The parent entity, either specified by the Entity object + itself, or its id or its name. Default is None. + id : int + Integer id of the parent entity. Ignored if `parent` + is not None. + name : str + Name of the parent entity. Ignored if `parent is not + none`. + inheritance : str, default OBLIGATORY + One of ``obligatory``, ``recommended``, ``suggested``, or ``fix``. Specifies the + minimum importance which parent properties need to have to be inherited by this + entity. If no `inheritance` is given, no properties will be inherited by the child. + This parameter is case-insensitive. 
+ + See Also + -------- + Entity.add_parent + + """ + + return super().add_parent(parent=parent, id=id, name=name, inheritance=inheritance) + + def __init__(self, name=None, id=None, description=None, datatype=None): # @ReservedAssignment + Entity.__init__(self, name=name, id=id, description=description, + datatype=datatype, role="RecordType") + + def to_xml(self, xml=None, add_properties=ALL): + if xml is None: + xml = etree.Element("RecordType") + + return Entity.to_xml(self, xml, add_properties) + + +class Record(Entity): + + """This class represents LinkAhead's Record entities.""" + + def add_property(self, property=None, value=None, id=None, name=None, description=None, datatype=None, + unit=None, importance=FIX, inheritance=FIX): # @ReservedAssignment + """See ``Entity.add_property``.""" + + return super().add_property( + property=property, id=id, name=name, description=description, datatype=datatype, + value=value, unit=unit, importance=importance, inheritance=inheritance) + + def __init__(self, name=None, id=None, description=None): # @ReservedAssignment + Entity.__init__(self, name=name, id=id, description=description, + role="Record") + + def to_xml(self, xml=None, add_properties=ALL): + if xml is None: + xml = etree.Element("Record") + + return Entity.to_xml(self, xml, add_properties=ALL) + + +class File(Record): + + """This class represents LinkAhead's file entities. + + For inserting a new file to the server, `path` gives the new location, and + (exactly?) one of `file` and `pickup` should (must?) be given to specify the + source of the file. + + Symlinking from the "extroot" file system is not supported by this API yet, + it can be done manually using the `InsertFilesInDir` flag. For sample code, + look at `test_files.py` in the Python integration tests of the + `load_files.py` script in the advanced user tools. + + @param name: A name for this file record (That's an entity name - not to be + confused with the last segment of the files path). 
+ @param id: An ID. + @param description: A description for this file record. + @param path: The complete path, including the file name, of the file in the + server's "caosroot" file system. + @param file: A local path or python file object. The file designated by + this argument will be uploaded to the server via HTTP. + @param pickup: A file/folder in the DropOffBox (the server will move that + file into its "caosroot" file system). + @param thumbnail: (Local) filename to a thumbnail for this file. + @param properties: A list of properties for this file record. @todo is this + implemented? + @param from_location: Deprecated, use `pickup` instead. + + """ + + def __init__(self, name=None, id=None, description=None, # @ReservedAssignment + path=None, file=None, pickup=None, # @ReservedAssignment + thumbnail=None, from_location=None): + Record.__init__(self, id=id, name=name, description=description) + self.role = "File" + self.datatype = None + + # location in the fileserver + self.path = path + + # local file path or pointer to local file + self.file = file + self.thumbnail = thumbnail + + self.pickup = pickup + + if from_location is not None: + warn(DeprecationWarning( + "Param `from_location` is deprecated, use `pickup instead`.")) + + if self.pickup is None: + self.pickup = from_location + + def to_xml(self, xml=None, add_properties=ALL, local_serialization=False): + """Convert this file to an xml element. + + @return: xml element + """ + + if xml is None: + xml = etree.Element("File") + + return Entity.to_xml(self, xml=xml, add_properties=add_properties, + local_serialization=local_serialization) + + def download(self, target=None): + """Download this file-entity's actual file from the file server. It + will be stored to the target or will be hold as a temporary file. + + @param target: Where to store this file. + @return: local path of the downloaded file. 
+ """ + self.clear_server_messages() + + if target: + file_ = open(target, 'wb') + else: + file_ = NamedTemporaryFile(mode='wb', delete=False) + checksum = File.download_from_path(file_, self.path) + + if self._checksum is not None and self._checksum.lower() != checksum.hexdigest().lower(): + raise ConsistencyError( + "The downloaded file had an invalid checksum. Maybe the download did not finish?") + + return file_.name + + @staticmethod + def download_from_path(target_file, path): + + _log_request("GET (download): " + path) + response = get_connection().download_file(path) + + data = response.read(8000) + checksum = sha512() + + while data: + target_file.write(data) + checksum.update(data) + data = response.read(8000) + target_file.close() + + return checksum + + @staticmethod + def _get_checksum(files): + import locale + + if hasattr(files, "name"): + return File._get_checksum_single_file(files.name) + else: + if isdir(files): + checksumappend = "" + + for child in sorted(listdir(files), + key=cmp_to_key(locale.strcoll)): + + if isdir(files + '/' + child): + checksumappend += child + checksumappend += File._get_checksum(files + "/" + child) + checksum = sha512() + checksum.update(checksumappend.encode('utf-8')) + + return checksum.hexdigest() + else: + return File._get_checksum_single_file(files) + + @staticmethod + def _get_checksum_single_file(single_file): + _file = open(single_file, 'rb') + data = _file.read(1000) + checksum = sha512() + + while data: + checksum.update(data) + data = _file.read(1000) + _file.close() + + return checksum.hexdigest() + + def add_property(self, property=None, id=None, name=None, description=None, datatype=None, + value=None, unit=None, importance=FIX, inheritance=FIX): # @ReservedAssignment + """See ``Entity.add_property``.""" + + return super().add_property( + property=property, id=id, name=name, description=description, datatype=datatype, + value=value, unit=unit, importance=importance, inheritance=inheritance) + + +class 
_Properties(list): + + def __init__(self): + list.__init__(self) + self._importance = dict() + self._inheritance = dict() + self._element_by_name = dict() + self._element_by_id = dict() + + def get_importance(self, property): # @ReservedAssignment + if property is not None: + if hasattr(property, "encode"): + property = self.get_by_name(property) # @ReservedAssignment + + return self._importance.get(property) + + def set_importance(self, property, importance): # @ReservedAssignment + if property is not None: + self._importance[property] = importance + + def get_by_name(self, name): + """Get a property of this list via it's name. Raises a LinkAheadException + if not exactly one property has this name. + + @param name: the name of the property to be returned. + @return: A property + """ + + return self._element_by_name[name] + + def extend(self, parents): + self.append(parents) + + return self + + def append(self, property, importance=None, inheritance=None): # @ReservedAssignment + if isinstance(property, list): + for p in property: + self.append(p, importance, inheritance) + + return + + if isinstance(property, Entity): + if importance is not None: + self._importance[property] = importance + + if inheritance is not None: + self._inheritance[property] = inheritance + else: + self._inheritance[property] = FIX + + if property.id is not None: + self._element_by_id[str(property.id)] = property + + if property.name is not None: + self._element_by_name[property.name] = property + list.append(self, property) + else: + raise TypeError("Argument was not an entity") + + return self + + def to_xml(self, add_to_element, add_properties): + for p in self: + importance = self._importance.get(p) + + if add_properties == FIX and not importance == FIX: + continue + + pelem = p.to_xml(xml=etree.Element("Property"), add_properties=FIX) + + if p in self._importance: + pelem.set("importance", importance) + + if p in self._inheritance: + pelem.set("flag", "inheritance:" + + 
str(self._inheritance.get(p))) + add_to_element.append(pelem) + + return self + + def __repr__(self): + xml = etree.Element("PropertyList") + self.to_xml(xml, add_properties=FIX) + + return xml2str(xml) + + def _get_entity_by_cuid(self, cuid): + ''' + Get the first entity which has the given cuid. + Note: this method is intended for internal use. + @param name: The cuid of the entity to be returned. + @return: Entity with the given cuid. + ''' + + for e in self: + if e._cuid is not None: + if str(e._cuid) == str(cuid): + return e + raise KeyError("No entity with that cuid in this container.") + + def remove(self, prop): + if isinstance(prop, Entity): + if prop in self: + list.remove(self, prop) + + return + else: + if prop.id is not None: + # by id + + for e in self: + if e.id is not None and e.id == prop.id: + list.remove(self, e) + + return + + if prop.name is not None: + # by name + + for e in self: + if e.name is not None and e.name == prop.name: + list.remove(self, e) + + return + elif hasattr(prop, "encode"): + # by name + + for e in self: + if e.name is not None and str(e.name) == str(prop): + list.remove(self, e) + + return + elif isinstance(prop, int): + # by id + + for e in self: + if e.id is not None and e.id == prop: + list.remove(self, e) + + return + raise KeyError(str(prop) + " not found.") + + +class _ParentList(list): + # TODO unclear why this class is private. Isn't it use full for users? + + def _get_entity_by_cuid(self, cuid): + ''' + Get the first entity which has the given cuid. + Note: this method is intended for internal use. + @param name: The cuid of the entity to be returned. + @return: Entity with the given cuid. 
+ ''' + + for e in self: + if e._cuid is not None: + if str(e._cuid) == str(cuid): + return e + raise KeyError("No entity with that cuid in this container.") + + def __init__(self): + list.__init__(self) + self._element_by_name = dict() + self._element_by_id = dict() + + def extend(self, parents): + self.append(parents) + + return self + + def append(self, parent): # @ReservedAssignment + if isinstance(parent, list): + for p in parent: + self.append(p) + + return + + if isinstance(parent, Entity): + if parent.id: + self._element_by_id[str(parent.id)] = parent + + if parent.name: + self._element_by_name[parent.name] = parent + list.append(self, parent) + else: + raise TypeError("Argument was not an Entity") + + return self + + def to_xml(self, add_to_element): + for p in self: + pelem = etree.Element("Parent") + + if p.id is not None: + pelem.set("id", str(p.id)) + + if p._cuid is not None: + pelem.set("cuid", str(p._cuid)) + + if p.name is not None: + pelem.set("name", str(p.name)) + + if p.description is not None: + pelem.set("description", str(p.description)) + + if len(p._flags) > 0: + flagattr = "" + + for key in p._flags.keys(): + flag = p._flags[key] + + if flag is not None and flag != "": + flagattr += str(key) + ":" + str(flag) + "," + else: + flagattr += str(key) + "," + pelem.set("flag", flagattr) + add_to_element.append(pelem) + + def __repr__(self): + xml = etree.Element("ParentList") + self.to_xml(xml) + + return xml2str(xml) + + def remove(self, parent): + if isinstance(parent, Entity): + if parent in self: + list.remove(self, parent) + else: + if parent.id is not None: + # by id + + for e in self: + if e.id is not None and e.id == parent.id: + list.remove(self, e) + + return + + if parent.name is not None: + # by name + + for e in self: + if e.name is not None and e.name == parent.name: + list.remove(self, e) + + return + elif hasattr(parent, "encode"): + # by name + + for e in self: + if e.name is not None and e.name == parent: + list.remove(self, 
e) + + return + elif isinstance(parent, int): + # by id + + for e in self: + if e.id is not None and e.id == parent: + list.remove(self, e) + + return + raise KeyError(str(parent) + " not found.") + + +class Messages(list): + """This specialization of list stores error, warning, info, and other + messages. The mentioned three messages types play a special role. + They are generated by the client and the server while processing the entity + to which the message in question belongs. It is RECOMMENDED NOT to specify + such messages manually. The other messages are ignored by the server unless + there is a plug-in which interprets them. + + Any message MUST have a type. It MAY have a code (an integer), a description (short string), + or a body (longer string): + + <$Type code=$code description=$description>$body</$Type> + + Error, warning, and info messages will be deleted before any transaction. + + Examples: + <<< msgs = Messages() + + <<< # create Message + <<< msg = Message(type="HelloWorld", code=1, description="Greeting the world", body="Hello, world!") + + <<< # append it to the Messages + <<< msgs.append(msg) + + <<< # use Messages as list of Message objects + <<< for m in msgs: + ... assert isinstance(m,Message) + + <<< # remove it + <<< msgs.remove(msg) + + <<< # ok append it again ... + <<< msgs.append(msg) + <<< # get it back via get(...) and the key tuple (type, code) + <<< assert id(msgs.get("HelloWorld",1))==id(msg) + """ + + def clear_server_messages(self): + """Removes all messages of type error, warning and info. 
All other + messages types are custom types which should be handled by custom + code.""" + rem = [] + + for m in self: + if m.type.lower() in ["error", "warning", "info"]: + rem.append(m) + + for m in rem: + self.remove(m) + + ####################################################################### + # can be removed after 01.07.24 + # default implementation of list is sufficient + def __setitem__(self, key, value): # @ReservedAssignment + if not isinstance(value, Message): + warn("__setitem__ will in future only accept Message objects as second argument. " + "You will no longe be" + " able to pass bodys such that Message object is created on the fly", + DeprecationWarning) + if not isinstance(key, int): + warn("__setitem__ will in future only accept int as first argument", + DeprecationWarning) + if isinstance(key, tuple): + if len(key) == 2: + type = key[0] # @ReservedAssignment + code = key[1] + elif len(key) == 1: + type = key[0] # @ReservedAssignment + code = None + else: + raise TypeError( + "('type', 'code'), ('type'), or 'type' expected.") + elif isinstance(key, Messages._msg_key): + type = key._type # @ReservedAssignment + code = key._code + else: + type = key # @ReservedAssignment + code = None + + if isinstance(value, tuple): + if len(value) == 2: + description = value[0] + body = value[1] + elif len(value) == 1: + body = value[0] + description = None + else: + raise TypeError( + "('description', 'body'), ('body'), or 'body' expected.") + + if isinstance(value, Message): + body = value.body + description = value.description + m = Message + else: + body = value + description = None + m = Message(type=type, code=code, description=description, body=body) + if isinstance(key, int): + super().__setitem__(key, m) + else: + self.append(m) + + def __getitem__(self, key): + if not isinstance(key, int): + warn("__getitem__ only supports integer keys in future.", DeprecationWarning) + if isinstance(key, tuple): + if len(key) == 2: + type = key[0] # 
@ReservedAssignment + code = key[1] + elif len(key) == 1: + type = key[0] # @ReservedAssignment + code = None + else: + raise TypeError( + "('type', 'code'), ('type'), or 'type' expected.") + elif isinstance(key, int) and key >= 0: + return super().__getitem__(key) + else: + type = key # @ReservedAssignment + code = None + m = self.get(type, code) + if m is None: + raise KeyError() + if m.description: + return (m.description, m.body) + else: + return m.body + + def __delitem__(self, key): + if isinstance(key, tuple): + warn("__delitem__ only supports integer keys in future.", DeprecationWarning) + if self.get(key[0], key[1]) is not None: + self.remove(self.get(key[0], key[1])) + else: + super().__delitem__(key) + + def remove(self, obj, obj2=None): + if obj2 is not None: + warn("Supplying a second argument to remove is deprecated.", + DeprecationWarning) + super().remove(self.get(obj, obj2)) + else: + super().remove(obj) + + def append(self, msg): + if isinstance(msg, Messages) or isinstance(msg, list): + warn("Supplying a list-like object to append is deprecated. Please use extend" + " instead.", DeprecationWarning) + for m in msg: + self.append(m) + return + + super().append(msg) + + @staticmethod + def _hash(t, c): + return hash(str(t).lower() + (str(",") + str(c) if c is not None else '')) + # end remove + ####################################################################### + + def get(self, type, code=None, default=None, exact=False): # @ReservedAssignment + """ + returns a message from the list that kind of matches type and code + + case and types (str/int) are ignored + + If no suitable message is found, the default argument is returned + If exact=True, the message has to match code and type exactly + """ + if not exact: + warn("The fuzzy mode (exact=False) is deprecated. 
Please use exact in future.", + DeprecationWarning) + + for msg in self: + if exact: + if msg.type == type and msg.code == code: + return msg + else: + if self._hash(msg.type, msg.code) == self._hash(type, code): + return msg + + return default + + def to_xml(self, add_to_element): + for m in self: + melem = m.to_xml() + add_to_element.append(melem) + + def __repr__(self): + xml = etree.Element("Messages") + self.to_xml(xml) + + return xml2str(xml) + + ####################################################################### + # can be removed after 01.07.24 + class _msg_key: + + def __init__(self, type, code): # @ReservedAssignment + warn("This class is deprecated.", DeprecationWarning) + self._type = type + self._code = code + + @staticmethod + def get(msg): + return Messages._msg_key(msg.type, msg.code) + + def __eq__(self, obj): + return self.__hash__() == obj.__hash__() + + def __hash__(self): + return hash(str(self._type).lower() + (str(",") + str(self._code) + if self._code is not None else '')) + + def __repr__(self): + return str(self._type) + (str(",") + str(self._code) + if self._code is not None else '') + # end remove + ####################################################################### + + +class _Messages(Messages): + def __init__(self, *args, **kwargs): + warn("_Messages is deprecated. " + "Use class Messages instead and beware of the slightly different API of the new" + " Messages class", DeprecationWarning) + super().__init__(*args, **kwargs) + + +def _basic_sync(e_local, e_remote): + '''Copy all state from a one entity to another. + + This method is used to syncronize an entity with a remote (i.e. a newly + retrieved) one. + + Any entity state of the local one will be overriden. + + Parameters + ---------- + e_local : Entity + Destination of the copy. + e_local : Entity + Source of the copy. + + + Returns + ------- + e_local : Entity + The syncronized entity. 
+ ''' + if e_local is None or e_remote is None: + return None + if e_local.role is None: + e_local.role = e_remote.role + elif e_remote.role is not None and not e_local.role.lower() == e_remote.role.lower(): + raise ValueError("The resulting entity had a different role ({0}) " + "than the local one ({1}). This probably means, that " + "the entity was intialized with a wrong class " + "by this client or it has changed in the past and " + "this client did't know about it yet.".format( + e_remote.role, e_local.role)) + + e_local.id = e_remote.id + e_local.name = e_remote.name + e_local.description = e_remote.description + e_local.path = e_remote.path + e_local._checksum = e_remote._checksum + e_local._size = e_remote._size + e_local.datatype = e_remote.datatype + e_local.unit = e_remote.unit + e_local.value = e_remote.value + e_local.properties = e_remote.properties + e_local.parents = e_remote.parents + e_local.messages = e_remote.messages + e_local.acl = e_remote.acl + e_local.permissions = e_remote.permissions + e_local.is_valid = e_remote.is_valid + e_local.is_deleted = e_remote.is_deleted + e_local.version = e_remote.version + e_local.state = e_remote.state + + if hasattr(e_remote, "query"): + e_local.query = e_remote.query + + if hasattr(e_remote, "affiliation"): + e_local.affiliation = e_remote.affiliation + + return e_local + + +def _deletion_sync(e_local, e_remote): + if e_local is None or e_remote is None: + return + + try: + e_remote.get_messages()["info", 10] # try and get the deletion info + except KeyError: + # deletion info wasn't there + e_local.messages = e_remote.messages + + return + + _basic_sync(e_local, e_remote) + e_local.is_valid = lambda: False + e_local.is_deleted = lambda: True + e_local.id = None + + +class Container(list): + """Container is a type safe list for Entities. 
+ + It also provides several short-cuts for transactions like retrieval, + insertion, update, and deletion which are a applied to all entities + in the container or the whole container respectively. + """ + + _debug = staticmethod( + lambda: ( + get_config().getint( + "Container", + "debug") if get_config().has_section("Container") and + get_config().get( + "Container", + "debug") is not None else 0)) + + def is_valid(self): + for e in self: + if not e.is_valid(): + return False + + return True + + def __hash__(self): + return object.__hash__(self) + + def remove(self, entity): + """Remove the first entity from this container which is equal to the + given entity. Raise a ValueError if there is no such entity. + + Alternatively, if the argument is not an entity but an ID, the + contained entity with this ID is removed. + + @param entity: The entity to be removed. + """ + + if entity in self: + super().remove(entity) + else: + for ee in self: + if entity == ee.id: + super().remove(ee) + + return ee + raise ValueError( + "Container.remove(entity): entity not in Container") + + return entity + + def _get_entity_by_cuid(self, cuid): + ''' + Get the first entity which has the given cuid. + Note: this method is intended for internal use. + @param name: The cuid of the entity to be returned. + @return: Entity with the given cuid. + ''' + + for e in self: + if e._cuid is not None: + if str(e._cuid) == str(cuid): + return e + raise KeyError("No entity with such cuid (" + str(cuid) + ")!") + + def get_entity_by_id(self, id): # @ReservedAssignment + """Get the first entity which has the given id. Note: If several + entities are in this list which have the same id, this method will only + return the first and ignore the others. + + @param name: The id of the entity to be returned. + @return: Entity with the given id. 
+ """ + + for e in self: + if e.id: + if e.id == int(id): + return e + raise KeyError("No entity with such id (" + str(id) + ")!") + + def get_all_errors(self): + """Returns a dictionary with all errors from all entities in this + container. + + The dictionary keys are the ids of those entities having + contained an error. + """ + error_list = dict() + + for e in self: + if isinstance(e, Entity): + el = e.get_errors_deep() + + if len(el) > 0: + error_list[str(e.id)] = el + + return error_list + + def get_entity_by_name(self, name, case_sensitive=True): + """Get the first entity which has the given name. Note: If several + entities are in this list which have the same name, this method will + only return the first and ignore the others. + + @param name: the name of the entity to be returned. + @param case_sensitive (True/False): Do a case-sensitive search for name (or not). + @return: Entity with the given name. + """ + + for e in self: + if e.name is not None: + if case_sensitive and e.name == str(name): + return e + elif not case_sensitive and e.name.lower() == str(name).lower(): + return e + raise KeyError("No entity with such name (" + str(name) + ")!") + + def __init__(self): + """Container is a list of entities which can be + inserted/updated/deleted/retrieved at once.""" + list.__init__(self) + self._timestamp = None + self._srid = None + self.messages = Messages() + + def extend(self, entities): + """Extend this Container by appending all single entities in the given + list of entities. + + @param entities: list of entities. 
+ """ + + if isinstance(entities, Container): + for entity in entities: + self.append(entity) + elif isinstance(entities, (list, set)): + for entity in entities: + self.extend(entity) + elif isinstance(entities, Entity): + self.append(entities) + elif isinstance(entities, int): + self.append(entities) + elif hasattr(entities, "encode"): + self.append(entities) + else: + raise TypeError( + "Expected a list or a container (was " + str(type(entities)) + ").") + + return self + + def append(self, entity): + """Append an entity container. + + If the parameter is an integer an entity with the corresponding ID is appended. + If the parameter is a string an entity with the corresponding name is appended. + Raise a TypeError if the entity is not a sub type of the correct class (as defined + via the constructor). + + @param entity: The entity to be appended. + """ + + if isinstance(entity, Entity): + super().append(entity) + elif isinstance(entity, int): + super().append(Entity(id=entity)) + elif hasattr(entity, "encode"): + super().append(Entity(name=entity)) + elif isinstance(entity, QueryTemplate): + super().append(entity) + else: + warn("Entity was neither an id nor a name nor an entity." + + " (was " + str(type(entity)) + ":\n" + str(entity) + ")") + # raise TypeError( + # "Entity was neither an id nor a name nor an entity." + + # " (was " + str(type(entity)) + "\n" + str(entity) + ")") + + return self + + def to_xml(self, add_to_element=None, local_serialization=False): + """Get an xml tree representing this Container or append all entities + to the given xml element. + + @param add_to_element=None: optional element to which all entities of this container is to be appended. + @return xml element + """ + tmpid = 0 + + # users might already have specified some tmpids. -> look for smallest. 
+ + for e in self: + tmpid = min(tmpid, Container._get_smallest_tmpid(e)) + tmpid -= 1 + + if add_to_element is None: + add_to_element = etree.Element("Entities") + + for m in self.messages: + add_to_element.append(m.to_xml()) + + for e in self: + if e.id is None: + e.id = tmpid + tmpid -= 1 + + for e in self: + if isinstance(e, File): + elem = e.to_xml(local_serialization=local_serialization) + else: + elem = e.to_xml() + add_to_element.append(elem) + + return add_to_element + + def get_errors(self): + """Get all error messages of this container. + + @return Messages: Error messages. + """ + + if self.has_errors(): + ret = Messages() + + for m in self.messages: + if m.type.lower() == "error": + ret.append(m) + + return ret + else: + return None + + def get_warnings(self): + """Get all warning messages of this container. + + @return Messages: Warning messages. + """ + + if self.has_warnings(): + ret = Messages() + + for m in self.messages: + if m.type.lower() == "warning": + ret.append(m) + + return ret + else: + return None + + def get_all_messages(self): + ret = Messages() + + for e in self: + ret.extend(e.get_all_messages()) + + return ret + + def add_message(self, m): + self.messages.append(m) + + return self + + def has_warnings(self): + ''' + @return True: if and only if this container has any warning messages. + ''' + + for m in self.messages: + if m.type.lower() == "warning": + return True + + return False + + def has_errors(self): + ''' + @return True: if and only if this container has any error messages. + ''' + + for m in self.messages: + if m.type.lower() == "error": + return True + + return False + + def __str__(self): + return self.__repr__() + + def __repr__(self): + return xml2str(self.to_xml()) + + @staticmethod + def from_xml(xml_str): + """Creates a Container from the given xml string. + + @return The created Container. 
+ """ + + c = Container() + xml = etree.fromstring(xml_str) + + for element in xml: + e = _parse_single_xml_element(element) + c.append(e) + + return c + + @staticmethod + def _response_to_entities(http_response): + """Parse the response of a Http-request. + + Note: Method is intended for the internal use. + """ + body = http_response.read() + _log_response(body) + + xml = etree.fromstring(body) + + if xml.tag.lower() == "response": + c = Container() + + for child in xml: + e = _parse_single_xml_element(child) + + if isinstance(e, Message): + c.messages.append(e) + elif isinstance(e, Query): + c.query = e + + if e.messages is not None: + c.messages.extend(e.messages) + elif isinstance(e, (Entity, QueryTemplate)): + e.is_deleted = lambda: False + + if e.has_errors() is True: + e.is_valid = lambda: False + elif e.id is None or e.id < 0: + e.is_valid = lambda: False + else: + e.is_valid = lambda: True + c.append(e) + else: + # ignore + pass + c._timestamp = xml.get("timestamp") + c._srid = xml.get("srid") + + return c + else: + raise LinkAheadException( + "The server's response didn't contain the expected elements. The configuration of this client might be invalid (especially the url).") + + def _sync(self, container, unique, raise_exception_on_error, + name_case_sensitive=False, strategy=_basic_sync): + """Synchronize this container (C1) with another container (C2). + + That is: 1) Synchronize any entity e1 in C1 with the + corresponding entity e2 from C2 via e1._sync(c2). 2) Add any + leftover entity from C2 to C1. + """ + # TODO: This method is extremely slow. E.g. 30 seconds for 1000 + # entities. 
+ + sync_dict = self._calc_sync_dict( + remote_container=container, + unique=unique, + raise_exception_on_error=raise_exception_on_error, + name_case_sensitive=name_case_sensitive) + + # sync every entity in this container + + for entity in self: + try: + e_sync = sync_dict[entity] + + if e_sync is not None: + strategy(entity, e_sync.pop()) + + for e in e_sync: + self.append(e) + except KeyError: + pass + + # add leftover entities + try: + if sync_dict[self] is not None: + for e in sync_dict[self]: + self.append(e) + except KeyError: + pass + + # messages: + + for m in container.messages: + self.add_message(m) + + self._timestamp = container._timestamp + self._srid = container._srid + + def _calc_sync_dict(self, remote_container, unique, + raise_exception_on_error, name_case_sensitive): + # self is local, remote_container is remote. + + # which is to be synced with which: + # sync_dict[local_entity]=sync_remote_enities + sync_dict = dict() + + # list of remote entities which already have a local equivalent + used_remote_entities = [] + + # match by cuid + + for local_entity in self: + + sync_dict[local_entity] = None + + if local_entity._cuid is not None: + # a list of remote entities which are equivalents of + # local_entity + sync_remote_entities = [] + + for remote_entity in remote_container: + if remote_entity._cuid is not None and str(remote_entity._cuid) == str( + local_entity._cuid) and remote_entity not in used_remote_entities: + sync_remote_entities.append(remote_entity) + used_remote_entities.append(remote_entity) + + if len(sync_remote_entities) > 0: + sync_dict[local_entity] = sync_remote_entities + + if unique and len(sync_remote_entities) > 1: + msg = "Request was not unique. CUID " + \ + str(local_entity._cuid) + " was found " + \ + str(len(sync_remote_entities)) + " times." 
+ local_entity.add_message(Message(description=msg, type="Error")) + + if raise_exception_on_error: + raise MismatchingEntitiesError(msg) + + # match by id + + for local_entity in self: + if sync_dict[local_entity] is None and local_entity.id is not None: + sync_remote_entities = [] + + for remote_entity in remote_container: + if (remote_entity.id is not None + and remote_entity.id == local_entity.id + and remote_entity not in used_remote_entities): + sync_remote_entities.append(remote_entity) + used_remote_entities.append(remote_entity) + + if len(sync_remote_entities) > 0: + sync_dict[local_entity] = sync_remote_entities + + if unique and len(sync_remote_entities) > 1: + msg = "Request was not unique. ID " + \ + str(local_entity.id) + " was found " + \ + str(len(sync_remote_entities)) + " times." + local_entity.add_message(Message(description=msg, type="Error")) + + if raise_exception_on_error: + raise MismatchingEntitiesError(msg) + + # match by path + + for local_entity in self: + if (sync_dict[local_entity] is None + and local_entity.path is not None): + sync_remote_entities = [] + + for remote_entity in remote_container: + if (remote_entity.path is not None + and str(remote_entity.path) == ( + local_entity.path + + if local_entity.path.startswith("/") else "/" + + local_entity.path) + and remote_entity not in used_remote_entities): + sync_remote_entities.append(remote_entity) + used_remote_entities.append(remote_entity) + + if len(sync_remote_entities) > 0: + sync_dict[local_entity] = sync_remote_entities + + if unique and len(sync_remote_entities) > 1: + msg = "Request was not unique. Path " + \ + str(local_entity.path) + " was found " + \ + str(len(sync_remote_entities)) + " times." 
+ local_entity.add_message(Message(description=msg, type="Error")) + + if raise_exception_on_error: + raise MismatchingEntitiesError(msg) + + # match by name + + for local_entity in self: + if (sync_dict[local_entity] is None + and local_entity.name is not None): + sync_remote_entities = [] + + for remote_entity in remote_container: + if (remote_entity.name is not None + and (str(remote_entity.name) == str(local_entity.name) + or + (name_case_sensitive is False and + str(remote_entity.name).lower() == str( + local_entity.name).lower())) + and remote_entity not in used_remote_entities): + sync_remote_entities.append(remote_entity) + used_remote_entities.append(remote_entity) + + if len(sync_remote_entities) > 0: + sync_dict[local_entity] = sync_remote_entities + + if unique and len(sync_remote_entities) > 1: + msg = "Request was not unique. Name " + \ + str(local_entity.name) + " was found " + \ + str(len(sync_remote_entities)) + " times." + local_entity.add_message(Message(description=msg, type="Error")) + + if raise_exception_on_error: + raise MismatchingEntitiesError(msg) + + # add remaining entities to this remote_container + sync_remote_entities = [] + + for remote_entity in remote_container: + if not (remote_entity in used_remote_entities): + sync_remote_entities.append(remote_entity) + + if len(sync_remote_entities) > 0: + sync_dict[self] = sync_remote_entities + + if unique and len(sync_remote_entities) != 0: + msg = "Request was not unique. There are " + \ + str(len(sync_remote_entities)) + \ + " entities which could not be matched to one of the requested ones." + remote_container.add_message(Message(description=msg, type="Error")) + + if raise_exception_on_error: + raise MismatchingEntitiesError(msg) + + return sync_dict + + def _test_dependencies_in_container(self, container): + """This function returns those elements of a given container that are a dependency of another element of the same container. 
+ + Args: + container (Container): a linkahead container + + Returns: + [set]: a set of unique elements that are a dependency of another element of `container` + """ + item_id = set() + is_parent = set() + is_property = set() + is_being_referenced = set() + dependent_parents = set() + dependent_properties = set() + dependent_references = set() + dependencies = set() + + for container_item in container: + item_id.add(container_item.id) + + for parents in container_item.get_parents(): + is_parent.add(parents.id) + + for references in container_item.get_properties(): + if is_reference(references.datatype): + # add only if it is a reference, not a property + + if references.value is None: + continue + elif isinstance(references.value, int): + is_being_referenced.add(references.value) + elif is_list_datatype(references.datatype): + for list_item in references.value: + if isinstance(list_item, int): + is_being_referenced.add(list_item) + else: + is_being_referenced.add(list_item.id) + else: + try: + is_being_referenced.add(references.value.id) + except AttributeError: + pass + + if hasattr(references, 'id'): + is_property.add(references.id) + + dependent_parents = item_id.intersection(is_parent) + dependent_properties = item_id.intersection(is_property) + dependent_references = item_id.intersection(is_being_referenced) + dependencies = dependent_parents.union(dependent_references) + dependencies = dependencies.union(dependent_properties) + + return dependencies + + def delete(self, raise_exception_on_error=True, flags=None, chunk_size=100): + """Delete all entities in this container. + + Entities are identified via their id if present and via their + name otherwise. If any entity has no id and no name a + TransactionError will be raised. + + Note: If only a name is given this could lead to ambiguities. If + this happens, none of them will be deleted. It occurs an error + instead. 
+ """ + item_count = len(self) + # Split Container in 'chunk_size'-sized containers (if necessary) to avoid error 414 Request-URI Too Long + + if item_count > chunk_size: + dependencies = self._test_dependencies_in_container(self) + ''' + If there are as many dependencies as entities in the container and it is larger than chunk_size it cannot be split and deleted. + This case cannot be handled at the moment. + ''' + + if len(dependencies) == item_count: + if raise_exception_on_error: + te = TransactionError( + msg="The container is too large and with too many dependencies within to be deleted.", + container=self) + raise te + + return self + + # items which have to be deleted later because of dependencies. + dependencies_delete = Container() + + for i in range(0, int(item_count/chunk_size)+1): + chunk = Container() + + for j in range(i*chunk_size, min(item_count, (i+1)*chunk_size)): + if len(dependencies): + if self[j].id in dependencies: + dependencies_delete.append(self[j]) + else: + chunk.append(self[j]) + else: + chunk.append(self[j]) + + if len(chunk): + chunk.delete() + if len(dependencies_delete): + dependencies_delete.delete() + + return self + + if len(self) == 0: + if raise_exception_on_error: + te = TransactionError( + msg="There are no entities to be deleted. This container is empty.", + container=self) + raise te + + return self + self.clear_server_messages() + + c = get_connection() + id_str = [] + + for entity in self: + if entity.is_deleted(): + continue + entity._cuid = None + + if entity.id is not None: + id_str.append(str(entity.id)) + elif entity.name is not None: + id_str.append(str(entity.name)) + else: + entity.add_message( + Message( + type="Error", + description="This entity has no identifier. It cannot be deleted.")) + + if raise_exception_on_error: + ee = EntityError( + "This entity has no identifier. 
It cannot be deleted.", entity) + raise TransactionError(ee) + entity.is_valid = lambda: False + + if len(id_str) == 0: + if raise_exception_on_error: + te = TransactionError( + msg="There are no entities to be deleted.", + container=self) + raise te + + return self + entity_url_segments = [_ENTITY_URI_SEGMENT, "&".join(id_str)] + + _log_request("DELETE: " + str(entity_url_segments) + + ("?" + str(flags) if flags is not None else '')) + + http_response = c.delete(entity_url_segments, query_dict=flags) + cresp = Container._response_to_entities(http_response) + self._sync(cresp, raise_exception_on_error=raise_exception_on_error, + unique=True, strategy=_deletion_sync) + + if raise_exception_on_error: + raise_errors(self) + + return self + + def retrieve(self, query=None, unique=True, + raise_exception_on_error=True, sync=True, flags=None): + """Retrieve all entities in this container identified via their id if + present and via their name otherwise. Any locally already existing + attributes (name, description, ...) will be preserved. Any such + properties and parents will be synchronized as well. They will not be + overridden. This method returns a Container containing the this entity. + + If any entity has no id and no name a LinkAheadException will be raised. + + Note: If only a name is given this could lead to ambiguities. All entities with the name in question + will be returned. Therefore, the container could contain more elements after the retrieval than + before. + """ + + if isinstance(query, list): + self.extend(query) + query = None + cresp = Container() + entities_str = [] + + if query is None: + for entity in self: + if entity.id is not None and entity.id < 0: + entity.id = None + entity.clear_server_messages() + + if entity.id is not None: + entities_str.append(str(entity.id)) + elif entity.name is not None: + entities_str.append(str(entity.name)) + elif entity.path is not None: + # fetch by path (files only) + cresp.extend(execute_query( + "FIND FILE . 
STORED AT \"" + str(entity.path) + "\"", unique=False)) + else: + entity.add_message( + Message( + type="Error", + description="This entity has no identifier. It cannot be retrieved.")) + + if raise_exception_on_error: + ee = EntityError( + "This entity has no identifier. It cannot be retrieved.", + entity) + raise TransactionError(ee) + entity.is_valid = lambda: False + else: + entities_str.append(str(query)) + + self.clear_server_messages() + cresp2 = self._retrieve(entities=entities_str, flags=flags) + cresp.extend(cresp2) + cresp.messages.extend(cresp2.messages) + + if raise_exception_on_error: + raise_errors(cresp) + + if sync: + self._sync(cresp, unique=unique, + raise_exception_on_error=raise_exception_on_error) + + return self + else: + return cresp + + @staticmethod + def _split_uri_string(entities): + + # get half length of entities_str + hl = len(entities) // 2 + + # split in two uris + + return (entities[0:hl], entities[hl:len(entities)]) + + def _retrieve(self, entities, flags): + c = get_connection() + try: + _log_request("GET: " + _ENTITY_URI_SEGMENT + str(entities) + + ('' if flags is None else "?" 
+ str(flags))) + http_response = c.retrieve( + entity_uri_segments=[ + _ENTITY_URI_SEGMENT, str( + "&".join(entities))], query_dict=flags) + + return Container._response_to_entities(http_response) + except HTTPURITooLongError as uri_e: + try: + # split up + uri1, uri2 = Container._split_uri_string(entities) + except ValueError as val_e: + raise uri_e from val_e + c1 = self._retrieve(entities=uri1, flags=flags) + c2 = self._retrieve(entities=uri2, flags=flags) + c1.extend(c2) + c1.messages.extend(c2.messages) + + return c1 + + def clear_server_messages(self): + self.messages.clear_server_messages() + + for entity in self: + entity.clear_server_messages() + + return self + + @staticmethod + def _dir_to_http_parts(root, d, upload): # @ReservedAssignment + ret = [] + x = (root + '/' + d if d is not None else root) + + for f in listdir(x): + if isdir(x + '/' + f): + part = MultipartParam( + name=hex(randint(0, sys.maxsize)), value="") + part.filename = upload + \ + ('/' + d + '/' if d is not None else '/') + f + '/' + ret.extend(Container._dir_to_http_parts( + root, (d + '/' + f if d is not None else f), upload)) + else: + part = MultipartParam.from_file( + paramname=hex(randint(0, sys.maxsize)), filename=x + '/' + f) + part.filename = upload + \ + ('/' + d + '/' if d is not None else '/') + f + ret.append(part) + + return ret + + def update(self, strict=False, raise_exception_on_error=True, + unique=True, sync=True, flags=None): + """Update these entites.""" + + if len(self) < 1: + te = TransactionError( + msg="There are no entities to be updated. 
This container is empty.", + container=self) + raise te + + self.clear_server_messages() + insert_xml = etree.Element("Update") + http_parts = [] + + if flags is None: + flags = {} + + if strict is True: + flags["strict"] = "true" + + if unique is True: + flags["uniquename"] = "true" + + for entity in self: + if (entity.id is None or entity.id < 0): + ee = EntityError( + "You tried to update an entity without a valid id.", + entity) + raise TransactionError(ee) + + self._linearize() + + for entity in self: + + # process files if present + Container._process_file_if_present_and_add_to_http_parts( + http_parts, entity) + + for entity in self: + entity_xml = entity.to_xml() + + if hasattr(entity, '_upload') and entity._upload is not None: + entity_xml.set("upload", entity._upload) + elif hasattr(entity, 'pickup') and entity.pickup is not None: + entity_xml.set("pickup", entity.pickup) + + insert_xml.append(entity_xml) + + _log_request("PUT: " + _ENTITY_URI_SEGMENT + + ('' if flags is None else "?" 
+ str(flags)), insert_xml) + + con = get_connection() + + if http_parts is not None and len(http_parts) > 0: + http_parts.insert( + 0, MultipartParam("FileRepresentation", xml2str(insert_xml))) + body, headers = multipart_encode(http_parts) + + http_response = con.update( + entity_uri_segment=[_ENTITY_URI_SEGMENT], + query_dict=flags, + body=body, + headers=headers) + else: + http_response = con.update( + entity_uri_segment=[_ENTITY_URI_SEGMENT], query_dict=flags, + body=xml2str(insert_xml)) + + cresp = Container._response_to_entities(http_response) + + if raise_exception_on_error: + raise_errors(cresp) + + if sync: + self._sync(cresp, unique=unique, + raise_exception_on_error=raise_exception_on_error) + + return self + else: + return cresp + + @staticmethod + def _process_file_if_present_and_add_to_http_parts(http_parts, entity): + if isinstance(entity, File) and hasattr( + entity, 'file') and entity.file is not None: + new_checksum = File._get_checksum(entity.file) + + # do not transfer unchanged files. 
+ + if entity._checksum is not None and entity._checksum.lower() == new_checksum.lower(): + entity._upload = None + + return + + entity._size = None + entity._checksum = new_checksum + entity._upload = hex(randint(0, sys.maxsize)) + + if hasattr(entity.file, "name"): + _file = entity.file.name + else: + _file = entity.file + + if isdir(_file): + http_parts.extend( + Container._dir_to_http_parts(_file, None, entity._upload)) + part = MultipartParam( + name=hex(randint(0, sys.maxsize)), value="") + part.filename = entity._upload + '/' + else: + part = MultipartParam.from_file( + paramname=hex(randint(0, sys.maxsize)), filename=_file) + part.filename = entity._upload + http_parts.append(part) + + if entity.thumbnail is not None: + part = MultipartParam.from_file(paramname=hex( + randint(0, sys.maxsize)), filename=entity.thumbnail) + part.filename = entity._upload + ".thumbnail" + http_parts.append(part) + else: + entity._checksum = None + + def insert(self, strict=False, raise_exception_on_error=True, + unique=True, sync=True, flags=None): + """Insert this file entity into LinkAhead. A successful insertion will + generate a new persistent ID for this entity. This entity can be + identified, retrieved, updated, and deleted via this ID until it has + been deleted. + + If the insertion fails, a LinkAheadException will be raised. The server will have returned at + least one error-message describing the reason why it failed in that case (call + <this_entity>.get_all_messages() in order to get these error-messages). + + Some insertions might cause warning-messages on the server-side, but the entities are inserted + anyway. Set the flag 'strict' to True in order to force the server to take all warnings as errors. + This prevents the server from inserting this entity if any warning occurs. + + @param strict=False: Flag for strict mode. + @param sync=True: synchronize this container with the response from the server. 
Otherwise, + this method returns a new container with the inserted entities and leaves this container untouched. + """ + + self.clear_server_messages() + insert_xml = etree.Element("Insert") + http_parts = [] + + if flags is None: + flags = {} + + if strict: + flags["strict"] = "true" + + if unique: + flags["uniquename"] = "true" + + self._linearize() + + # TODO: This is a possible solution for ticket#137 +# retrieved = Container() +# for entity in self: +# if entity.is_valid(): +# retrieved.append(entity) +# if len(retrieved)>0: +# retrieved = retrieved.retrieve(raise_exception_on_error=False, sync=False) +# for e_remote in retrieved: +# if e_remote.id is not None: +# try: +# self.get_entity_by_id(e_remote.id).is_valid=e_remote.is_valid +# continue +# except KeyError: +# pass +# if e_remote.name is not None: +# try: +# self.get_entity_by_name(e_remote.name).is_valid=e_remote.is_valid +# continue +# except KeyError: +# pass + for entity in self: + if entity.is_valid(): + continue + + # process files if present + Container._process_file_if_present_and_add_to_http_parts( + http_parts, entity) + + for entity in self: + if entity.is_valid(): + continue + entity_xml = entity.to_xml() + + if hasattr(entity, '_upload') and entity._upload is not None: + entity_xml.set("upload", entity._upload) + elif hasattr(entity, 'pickup') and entity.pickup is not None: + entity_xml.set("pickup", entity.pickup) + insert_xml.append(entity_xml) + + if len(self) > 0 and len(insert_xml) < 1: + te = TransactionError( + msg="There are no entities to be inserted. This container contains existent entities only.", + container=self) + raise te + _log_request("POST: " + _ENTITY_URI_SEGMENT + + ('' if flags is None else "?" 
+ str(flags)), insert_xml) + + con = get_connection() + + if http_parts is not None and len(http_parts) > 0: + http_parts.insert( + 0, MultipartParam("FileRepresentation", xml2str(insert_xml))) + + body, headers = multipart_encode(http_parts) + http_response = con.insert( + entity_uri_segment=[_ENTITY_URI_SEGMENT], + body=body, + headers=headers, + query_dict=flags) + else: + http_response = con.insert( + entity_uri_segment=[_ENTITY_URI_SEGMENT], + body=xml2str(insert_xml), + query_dict=flags) + + cresp = Container._response_to_entities(http_response) + + if sync: + + self._sync(cresp, unique=unique, + raise_exception_on_error=raise_exception_on_error) + + if raise_exception_on_error: + raise_errors(self) + + return self + else: + if raise_exception_on_error: + raise_errors(cresp) + + return cresp + + @staticmethod + def _get_smallest_tmpid(entity): + tmpid = 0 + + if entity.id is not None: + tmpid = min(tmpid, int(entity.id)) + + for p in entity.get_parents(): + if p.id is not None: + tmpid = min(tmpid, int(p.id)) + + for p in entity.get_properties(): + if p.id is not None: + tmpid = min(tmpid, Container._get_smallest_tmpid(p)) + + return tmpid + + def _linearize(self): + tmpid = 0 + ''' users might already have specified some tmpids. 
-> look for smallest.''' + + for e in self: + tmpid = min(tmpid, Container._get_smallest_tmpid(e)) + + tmpid -= 1 + + '''a tmpid for every entity''' + + for e in self: + if e.id is None: + e.id = tmpid + tmpid -= 1 + + # CUID + + if e._cuid is None or e._cuid == 'None' or e._cuid == '': + e._cuid = str(e.id) + "--" + str(uuid()) + + '''dereference properties and parents''' + + for e in self: + """properties.""" + + for p in e.get_properties(): + if p.id is None: + if p.name is not None: + # TODO using try except for normal execution flow is bad style + try: + w = self.get_entity_by_name(p.name) + p._wrap(w) + except KeyError: + pass + + '''parents''' + + for p in e.get_parents(): + if p.id is None: + if p.name is not None: + # TODO using try except for normal execution flow is bad style + try: + p._wrap(self.get_entity_by_name(p.name)) + except KeyError: + pass + + return self + + def get_property_values(self, *selectors): + """ Return a list of tuples with values of the given selectors. + + I.e. a tabular representation of the container's content. + + If the elements of the selectors parameter are tuples, they will return + the properties of the referenced entity, if present. E.g. ("window", + "height") will return the value of the height property of the + referenced window entity. + + All tuples of the returned list have the same length as the selectors + parameter and the ordering of the tuple's values correspond to the + order of the parameter as well. + + The tuple contains None for all values that are not available in the + entity. That does not necessarily mean, that the values are not stored + in the database (e.g. if a single entity was retrieved without + referenced entities). + + Parameters + ---------- + *selectors : str or tuple of str + Each selector is a list or tuple of property names, e.g. `"height", + "width"`. + + Returns + ------- + table : list of tuples + A tabular representation of the container's content. 
+ """ + table = [] + + for e in self: + table.append(e.get_property_values(*selectors)) + + return table + + +def sync_global_acl(): + c = get_connection() + http_response = c.retrieve(entity_uri_segments=["EntityPermissions"]) + body = http_response.read() + _log_response(body) + + xml = etree.fromstring(body) + + if xml.tag.lower() == "response": + for child in xml: + if child.tag == "EntityPermissions": + Permissions.known_permissions = Permissions(child) + + for pelem in child: + if pelem.tag == "EntityACL": + ACL.global_acl = ACL(xml=pelem) + else: + raise LinkAheadException( + "The server's response didn't contain the expected elements. The configuration of this client might be invalid (especially the url).") + + +def get_known_permissions(): + if Permissions.known_permissions is None: + sync_global_acl() + + return Permissions.known_permissions + + +def get_global_acl(): + if ACL.global_acl is None: + sync_global_acl() + + return ACL.global_acl + + +class ACI(): + def __init__(self, realm, username, role, permission): + self.role = role + self.username = username + self.realm = realm + self.permission = permission + + def __hash__(self): + return hash(self.__repr__()) + + def __eq__(self, other): + return isinstance(other, ACI) and (self.role is None and self.username == other.username and self.realm == + other.realm) or self.role == other.role and self.permission == other.permission + + def __repr__(self): + return str(self.realm) + ":" + str(self.username) + ":" + str(self.role) + ":" + str(self.permission) + + def add_to_element(self, e): + if self.role is not None: + e.set("role", self.role) + else: + e.set("username", self.username) + + if self.realm is not None: + e.set("realm", self.realm) + p = etree.Element("Permission") + p.set("name", self.permission) + e.append(p) + + +class ACL(): + + global_acl = None + + def __init__(self, xml=None): + if xml is not None: + self.parse_xml(xml) + else: + self.clear() + + def parse_xml(self, xml): + """Clear 
this ACL and parse the xml. + + Iterate over the rules in the xml and add each rule to this ACL. + + Contradicting rules will both be kept. + + Parameters + ---------- + xml : lxml.etree.Element + The xml element containing the ACL rules, i.e. <Grant> and <Deny> + rules. + """ + self.clear() + self._parse_xml(xml) + + def _parse_xml(self, xml): + """Parse the xml. + + Iterate over the rules in the xml and add each rule to this ACL. + + Contradicting rules will both be kept. + + Parameters + ---------- + xml : lxml.etree.Element + The xml element containing the ACL rules, i.e. <Grant> and <Deny> + rules. + """ + # @review Florian Spreckelsen 2022-03-17 + for e in xml: + role = e.get("role") + username = e.get("username") + realm = e.get("realm") + priority = e.get("priority") + + for p in e: + if p.tag == "Permission": + permission = p.get("name") + + if e.tag == "Grant": + self.grant(username=username, realm=realm, role=role, + permission=permission, priority=priority, + revoke_denial=False) + elif e.tag == "Deny": + self.deny(username=username, realm=realm, role=role, + permission=permission, priority=priority, + revoke_grant=False) + + def combine(self, other): + """ Combine and return new instance.""" + result = ACL() + result._grants.update(other._grants) + result._grants.update(self._grants) + result._denials.update(other._denials) + result._denials.update(self._denials) + result._priority_grants.update(other._priority_grants) + result._priority_grants.update(self._priority_grants) + result._priority_denials.update(other._priority_denials) + result._priority_denials.update(self._priority_denials) + + return result + + def __eq__(self, other): + return isinstance(other, ACL) and other._grants == self._grants and self._denials == other._denials and self._priority_grants == other._priority_grants and self._priority_denials == other._priority_denials + + def is_empty(self): + return len(self._grants) + len(self._priority_grants) + \ + len(self._priority_denials) + 
len(self._denials) == 0 + + def clear(self): + self._grants = set() + self._denials = set() + self._priority_grants = set() + self._priority_denials = set() + + def _get_boolean_priority(self, priority): + return str(priority).lower() in ["true", "1", "yes", "y"] + + def _remove_item(self, item, priority): + try: + self._denials.remove(item) + except KeyError: + pass + try: + self._grants.remove(item) + except KeyError: + pass + + if priority: + try: + self._priority_denials.remove(item) + except KeyError: + pass + try: + self._priority_grants.remove(item) + except KeyError: + pass + + def revoke_grant(self, username=None, realm=None, + role=None, permission=None, priority=False): + priority = self._get_boolean_priority(priority) + item = ACI(role=role, username=username, + realm=realm, permission=permission) + + if priority: + if item in self._priority_grants: + self._priority_grants.remove(item) + + if item in self._grants: + self._grants.remove(item) + + def revoke_denial(self, username=None, realm=None, + role=None, permission=None, priority=False): + priority = self._get_boolean_priority(priority) + item = ACI(role=role, username=username, + realm=realm, permission=permission) + + if priority: + if item in self._priority_denials: + self._priority_denials.remove(item) + + if item in self._denials: + self._denials.remove(item) + + def grant(self, permission, username=None, realm=None, role=None, + priority=False, revoke_denial=True): + """Grant a permission to a user or role. + + You must specify either only the username and the realm, or only the + role. + + By default a previously existing denial rule would be revoked, because + otherwise this grant wouldn't have any effect. However, for keeping + contradicting rules pass revoke_denial=False. + + Parameters + ---------- + permission: str + The permission to be granted. + username : str, optional + The username. Exactly one is required, either the `username` or the + `role`. 
+ realm: str, optional + The user's realm. Required when username is not None. + role: str, optional + The role (as in Role-Based Access Control). Exactly one is + required, either the `username` or the `role`. + priority: bool, default False + Whether this permission is granted with priority over non-priority + rules. + revoke_denial: bool, default True + Whether a contradicting denial (with same priority flag) in this + ACL will be revoked. + """ + # @review Florian Spreckelsen 2022-03-17 + priority = self._get_boolean_priority(priority) + item = ACI(role=role, username=username, + realm=realm, permission=permission) + if revoke_denial: + self._remove_item(item, priority) + + if priority is True: + self._priority_grants.add(item) + else: + self._grants.add(item) + + def deny(self, username=None, realm=None, role=None, + permission=None, priority=False, revoke_grant=True): + """Deny a permission to a user or role for this entity. + + You must specify either only the username and the realm, or only the + role. + + By default a previously existing grant rule would be revoked, because + otherwise this denial would override the grant rules anyways. However, + for keeping contradicting rules pass revoke_grant=False. + + Parameters + ---------- + permission: str + The permission to be denied. + username : str, optional + The username. Exactly one is required, either the `username` or the + `role`. + realm: str, optional + The user's realm. Required when username is not None. + role: str, optional + The role (as in Role-Based Access Control). Exactly one is + required, either the `username` or the `role`. + priority: bool, default False + Whether this permission is denied with priority over non-priority + rules. + revoke_grant: bool, default True + Whether a contradicting grant (with same priority flag) in this + ACL will be revoked. 
+ """ + # @review Florian Spreckelsen 2022-03-17 + priority = self._get_boolean_priority(priority) + item = ACI(role=role, username=username, + realm=realm, permission=permission) + if revoke_grant: + self._remove_item(item, priority) + + if priority is True: + self._priority_denials.add(item) + else: + self._denials.add(item) + + def to_xml(self, xml=None): + if xml is None: + xml = etree.Element("EntityACL") + + for aci in self._grants: + e = etree.Element("Grant") + e.set("priority", "False") + aci.add_to_element(e) + xml.append(e) + + for aci in self._denials: + e = etree.Element("Deny") + e.set("priority", "False") + aci.add_to_element(e) + xml.append(e) + + for aci in self._priority_grants: + e = etree.Element("Grant") + e.set("priority", "True") + aci.add_to_element(e) + xml.append(e) + + for aci in self._priority_denials: + e = etree.Element("Deny") + e.set("priority", "True") + aci.add_to_element(e) + xml.append(e) + + return xml + + def get_acl_for_role(self, role): + ret = ACL() + + for aci in self._grants: + if aci.role == role: + ret._grants.add(aci) + + for aci in self._denials: + if aci.role == role: + ret._denials.add(aci) + + for aci in self._priority_grants: + if aci.role == role: + ret._priority_grants.add(aci) + + for aci in self._priority_denials: + if aci.role == role: + ret._priority_denials.add(aci) + + return ret + + def get_acl_for_user(self, username, realm=None): + ret = ACL() + + for aci in self._grants: + if aci.username == username and ( + realm is None or aci.realm == realm): + ret._grants.add(aci) + + for aci in self._denials: + if aci.username == username and ( + realm is None or aci.realm == realm): + ret._denials.add(aci) + + for aci in self._priority_grants: + if aci.username == username and ( + realm is None or aci.realm == realm): + ret._priority_grants.add(aci) + + for aci in self._priority_denials: + if aci.username == username and ( + realm is None or aci.realm == realm): + ret._priority_denials.add(aci) + + return ret + + 
def get_permissions_for_user(self, username, realm=None): + acl = self.get_acl_for_user(username, realm) + _grants = set() + + for aci in acl._grants: + _grants.add(aci.permission) + _denials = set() + + for aci in acl._denials: + _denials.add(aci.permission) + _priority_grants = set() + + for aci in acl._priority_grants: + _priority_grants.add(aci.permission) + _priority_denials = set() + + for aci in acl._priority_denials: + _priority_denials.add(aci.permission) + + return ((_grants - _denials) | _priority_grants) - _priority_denials + + def get_permissions_for_role(self, role): + acl = self.get_acl_for_role(role) + _grants = set() + + for aci in acl._grants: + _grants.add(aci.permission) + _denials = set() + + for aci in acl._denials: + _denials.add(aci.permission) + _priority_grants = set() + + for aci in acl._priority_grants: + _priority_grants.add(aci.permission) + _priority_denials = set() + + for aci in acl._priority_denials: + _priority_denials.add(aci.permission) + + return ((_grants - _denials) | _priority_grants) - _priority_denials + + def is_permitted(self, role, permission): + return permission in self.get_permissions_for_role(role) + + def __repr__(self): + return xml2str(self.to_xml()) + + +class Query(): + """Query + + Attributes + ---------- + q : str + The query string. + flags : dict of str + A dictionary of flags to be send with the query request. + messages : Messages() + A container of messages included in the last query response. + cached : bool + indicates whether the server used the query cache for the execution of + this query. + results : int or Container + The number of results (when this was a count query) or the container + with the resulting entities. 
+ """ + + def putFlag(self, key, value=None): + self.flags[key] = value + + return self + + def removeFlag(self, key): + return self.flags.pop(key) + + def getFlag(self, key): + return self.flags.get(key) + + def __init__(self, q): + self.flags = dict() + self.messages = Messages() + self.cached = None + self.etag = None + + if isinstance(q, etree._Element): + self.q = q.get("string") + self.results = int(q.get("results")) + + if q.get("cached") is None: + self.cached = False + else: + self.cached = q.get("cached").lower() == "true" + self.etag = q.get("etag") + + for m in q: + if m.tag.lower() == 'warning' or m.tag.lower() == 'error': + self.messages.append(_parse_single_xml_element(m)) + else: + self.q = q + + def _query_request(self, query_dict): + """Used internally to execute the query request...""" + _log_request("GET Entity?" + str(query_dict), None) + connection = get_connection() + http_response = connection.retrieve( + entity_uri_segments=["Entity"], + query_dict=query_dict) + cresp = Container._response_to_entities(http_response) + return cresp + + def _paging_generator(self, first_page, query_dict, page_length): + """Used internally to create a generator of pages instead instead of a + container which contais all the results.""" + if len(first_page) == 0: + return # empty page + yield first_page + index = page_length + while self.results > index: + query_dict["P"] = f"{index}L{page_length}" + next_page = self._query_request(query_dict) + etag = next_page.query.etag + if etag is not None and etag != self.etag: + raise PagingConsistencyError("The database state changed while retrieving the pages") + yield next_page + index += page_length + + def execute(self, unique=False, raise_exception_on_error=True, cache=True, + page_length=None): + """Execute a query (via a server-requests) and return the results. + + Parameters + ---------- + + unique : bool + Whether the query is expected to have only one entity as result. + Defaults to False. 
+ raise_exception_on_error : bool + Whether an exception should be raises when there are errors in the + resulting entities. Defaults to True. + cache : bool + Whether to use the server-side query cache (equivalent to adding a + "cache" flag) to the Query object. Defaults to True. + page_length : int + Whether to use paging. If page_length > 0 this method returns a + generator (to be used in a for-loop or with list-comprehension). + The generator yields containers with up to page_length entities. + Otherwise, paging is disabled, as well as for count queries and + when unique is True. Defaults to None. + + Raises: + ------- + PagingConsistencyError + If the database state changed between paged requests. + + Yields + ------ + page : Container + Returns a container with the next `page_length` resulting entities. + + Returns + ------- + results : Container or integer + Returns an integer when it was a `COUNT` query. Otherwise, returns a + Container with the resulting entities. + """ + flags = self.flags + + if cache is False: + flags["cache"] = "false" + query_dict = dict(flags) + query_dict["query"] = str(self.q) + + has_paging = False + is_count_query = self.q.split()[0].lower() == "count" if len(self.q.split()) > 0 else False + + if not unique and not is_count_query and page_length is not None and page_length > 0: + has_paging = True + query_dict["P"] = f"0L{page_length}" + + # retreive first/only page + cresp = self._query_request(query_dict) + + self.results = cresp.query.results + self.cached = cresp.query.cached + self.etag = cresp.query.etag + + if is_count_query: + return self.results + + if raise_exception_on_error: + raise_errors(cresp) + + if unique: + if len(cresp) > 1 and raise_exception_on_error: + raise QueryNotUniqueError( + "Query '{}' wasn't unique.".format(self.q)) + + if len(cresp) == 0 and raise_exception_on_error: + raise EmptyUniqueQueryError( + "Query '{}' found no results.".format(self.q)) + + if len(cresp) == 1: + r = cresp[0] + 
r.messages.extend(cresp.messages) + + return r + self.messages = cresp.messages + + if has_paging: + return self._paging_generator(cresp, query_dict, page_length) + else: + return cresp + + +def execute_query(q, unique=False, raise_exception_on_error=True, cache=True, + flags=None, page_length=None): + """Execute a query (via a server-requests) and return the results. + + Parameters + ---------- + + q : str + The query string. + unique : bool + Whether the query is expected to have only one entity as result. + Defaults to False. + raise_exception_on_error : bool + Whether an exception should be raised when there are errors in the + resulting entities. Defaults to True. + cache : bool + Whether to use the query server-side cache (equivalent to adding a + "cache" flag). Defaults to True. + flags : dict of str + Flags to be added to the request. + page_length : int + Whether to use paging. If page_length > 0 this method returns a + generator (to be used in a for-loop or with list-comprehension). + The generator yields containers with up to page_length entities. + Otherwise, paging is disabled, as well as for count queries and + when unique is True. Defaults to None. + + Raises: + ------- + PagingConsistencyError + If the database state changed between paged requests. + + Yields + ------ + page : Container + Returns a container with the next `page_length` resulting entities. + + Returns + ------- + results : Container or integer + Returns an integer when it was a `COUNT` query. Otherwise, returns a + Container with the resulting entities. 
+ """ + query = Query(q) + + if flags is not None: + query.flags = flags + + return query.execute(unique=unique, + raise_exception_on_error=raise_exception_on_error, + cache=cache, page_length=page_length) + + +class DropOffBox(list): + def __init__(self, *args, **kwargs): + warn(DeprecationWarning( + "The DropOffBox is deprecated and will be removed in future.")) + super().__init__(*args, **kwargs) + + path = None + + def sync(self): + c = get_connection() + _log_request("GET: Info") + http_response = c.retrieve(["Info"]) + body = http_response.read() + _log_response(body) + + xml = etree.fromstring(body) + + for child in xml: + if child.tag.lower() == "stats": + infoelem = child + + break + + for child in infoelem: + if child.tag.lower() == "dropoffbox": + dropoffboxelem = child + + break + del self[:] + self.path = dropoffboxelem.get('path') + + for f in dropoffboxelem: + self.append(f.get('path')) + + return self + + +class UserInfo(): + + def __init__(self, xml): + self.roles = [role.text for role in xml.findall("Roles/Role")] + self.name = xml.get("username") + self.realm = xml.get("realm") + + +class Info(): + + def __init__(self): + self.messages = Messages() + self.sync() + + def sync(self): + c = get_connection() + try: + http_response = c.retrieve(["Info"]) + except LinkAheadConnectionError as conn_e: + print(conn_e) + + return + + xml = etree.fromstring(http_response.read()) + + for e in xml: + m = _parse_single_xml_element(e) + + if isinstance(m, UserInfo): + self.user_info = m + elif isinstance(m, TimeZone): + self.time_zone = m + else: + self.messages.append(m) + + def __str__(self): + if "Counts" not in [m.type for m in self.messages]: + return "linkahead.Info" + + if int(self.messages["counts"]["records"]) > 0: + return "Connection to LinkAhead with {} Records." .format( + self.messages["counts"]["records"] + ) + else: + return "Connection to LinkAhead without Records." 
+ + def __repr__(self): + return self.__str__() + + +class Permission(): + + def __init__(self, name, description=None): + self.name = name + self.description = description + + def __repr__(self): + return str(self) + + def __str__(self): + return self.name + + def __eq__(self, p): + if isinstance(p, Permission): + return p.name == self.name + + return False + + def __hash__(self): + return hash(self.name) + + +class Permissions(): + + known_permissions = None + + def __init__(self, xml): + self.parse_xml(xml) + + def clear(self): + self._perms = set() + + def parse_xml(self, xml): + self.clear() + + for e in xml: + if e.tag == "Permission": + self._perms.add(Permission(name=e.get("name"), + description=e.get("description"))) + + def __contains__(self, p): + if isinstance(p, Permission): + return p in self._perms + else: + return Permission(name=p) in self._perms + + def __repr__(self): + return str(self) + + def __str__(self): + return str(self._perms) + + +def parse_xml(xml): + """parse a string or tree representation of an xml document to a set of + entities (records, recordtypes, properties, or files). + + @param xml: a string or tree representation of an xml document. + @return: list of entities or single entity. 
+ """ + + if isinstance(xml, etree._Element): + elem = xml + else: + elem = etree.fromstring(xml) + + return _parse_single_xml_element(elem) + + +def _parse_single_xml_element(elem): + classmap = { + 'record': Record, + 'recordtype': RecordType, + 'property': Property, + 'file': File, + 'parent': Parent, + 'entity': Entity} + + if elem.tag.lower() in classmap: + klass = classmap.get(elem.tag.lower()) + entity = klass() + Entity._from_xml(entity, elem) + + return entity + elif elem.tag.lower() == "version": + return Version.from_xml(elem) + elif elem.tag.lower() == "state": + return State.from_xml(elem) + elif elem.tag.lower() == "emptystring": + return "" + elif elem.tag.lower() == "value": + if len(elem) == 1 and elem[0].tag.lower() == "emptystring": + return "" + elif len(elem) == 1 and elem[0].tag.lower() in classmap: + return _parse_single_xml_element(elem[0]) + elif elem.text is None or elem.text.strip() == "": + return None + + return str(elem.text.strip()) + elif elem.tag.lower() == "querytemplate": + return QueryTemplate._from_xml(elem) + elif elem.tag.lower() == 'query': + return Query(elem) + elif elem.tag.lower() == 'history': + return Message(type='History', description=elem.get("transaction")) + elif elem.tag.lower() == 'stats': + counts = elem.find("counts") + + return Message(type="Counts", description=None, body=counts.attrib) + elif elem.tag == "EntityACL": + return ACL(xml=elem) + elif elem.tag == "Permissions": + return Permissions(xml=elem) + elif elem.tag == "UserInfo": + return UserInfo(xml=elem) + elif elem.tag == "TimeZone": + return TimeZone(zone_id=elem.get("id"), offset=elem.get("offset"), + display_name=elem.text.strip()) + else: + return Message(type=elem.tag, code=elem.get( + "code"), description=elem.get("description"), body=elem.text) + + +def _evaluate_and_add_error(parent_error, ent): + """Evaluate the error message(s) attached to entity and add a + corresponding exception to parent_error. 
+ + Parameters: + ----------- + parent_error : TransactionError + Parent error to which the new exception will be attached. This + exception will be a direct child. + ent : Entity + Entity that caused the TransactionError. An exception is + created depending on its error message(s). + + Returns: + -------- + TransactionError : + Parent error with new exception(s) attached to it. + + """ + + if isinstance(ent, (Entity, QueryTemplate)): + # Check all error messages + found114 = False + found116 = False + + for err in ent.get_errors(): + # Evaluate specific EntityErrors depending on the error + # code + + if err.code is not None: + if int(err.code) == 101: # ent doesn't exist + new_exc = EntityDoesNotExistError(entity=ent, + error=err) + elif int(err.code) == 110: # ent has no data type + new_exc = EntityHasNoDatatypeError(entity=ent, + error=err) + elif int(err.code) == 403: # no permission + new_exc = AuthorizationError(entity=ent, + error=err) + elif int(err.code) == 152: # name wasn't unique + new_exc = UniqueNamesError(entity=ent, error=err) + elif int(err.code) == 114: # unqualified properties + found114 = True + new_exc = UnqualifiedPropertiesError(entity=ent, + error=err) + + for prop in ent.get_properties(): + new_exc = _evaluate_and_add_error(new_exc, + prop) + elif int(err.code) == 116: # unqualified parents + found116 = True + new_exc = UnqualifiedParentsError(entity=ent, + error=err) + + for par in ent.get_parents(): + new_exc = _evaluate_and_add_error(new_exc, + par) + else: # General EntityError for other codes + new_exc = EntityError(entity=ent, error=err) + else: # No error code causes a general EntityError, too + new_exc = EntityError(entity=ent, error=err) + parent_error.add_error(new_exc) + # Check for possible errors in parents and properties that + # weren't detected up to here + + if not found114: + dummy_err = EntityError(entity=ent) + + for prop in ent.get_properties(): + dummy_err = _evaluate_and_add_error(dummy_err, prop) + + if 
dummy_err.errors: + parent_error.add_error(dummy_err) + + if not found116: + dummy_err = EntityError(entity=ent) + + for par in ent.get_parents(): + dummy_err = _evaluate_and_add_error(dummy_err, par) + + if dummy_err.errors: + parent_error.add_error(dummy_err) + + elif isinstance(ent, Container): + parent_error.container = ent + + if ent.get_errors() is not None: + parent_error.code = ent.get_errors()[0].code + # In the highly unusual case of more than one error + # message, attach all of them. + parent_error.msg = '\n'.join( + [x.description for x in ent.get_errors()]) + # Go through all container elements and add them: + + for elt in ent: + parent_error = _evaluate_and_add_error(parent_error, elt) + + else: + raise TypeError("Parameter ent is to be an Entity or a Container") + + return parent_error + + +def raise_errors(arg0): + """Raise a TransactionError depending on the error code(s) inside + Entity, QueryTemplate or Container arg0. More detailed errors may + be attached to the TransactionError depending on the contents of + arg0. + + Parameters: + ----------- + arg0 : Entity, QueryTemplate, or Container + LinkAhead object whose messages are evaluated according to their + error codes + + """ + transaction_error = _evaluate_and_add_error(TransactionError(), + arg0) + # Raise if any error was found + + if len(transaction_error.all_errors) > 0: + raise transaction_error + # Cover the special case of an empty container with error + # message(s) (e.g. 
query syntax error) + + if (transaction_error.container is not None and + transaction_error.container.has_errors()): + raise transaction_error + + +def delete(ids, raise_exception_on_error=True): + c = Container() + + if isinstance(ids, list) or isinstance(ids, range): + for i in ids: + c.append(Entity(id=i)) + else: + c.append(Entity(id=ids)) + + return c.delete(raise_exception_on_error=raise_exception_on_error) diff --git a/src/linkahead/common/models.py.orig b/src/linkahead/common/models.py.orig new file mode 100644 index 0000000000000000000000000000000000000000..cd95cc37e0e0911f3f0c42d71bbd422da1c02bcb --- /dev/null +++ b/src/linkahead/common/models.py.orig @@ -0,0 +1,4873 @@ +# -*- coding: utf-8 -*- +# +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# Copyright (C) 2020-2023 Indiscale GmbH <info@indiscale.com> +# Copyright (C) 2020-2023 Florian Spreckelsen <f.spreckelsen@indiscale.com> +# Copyright (C) 2020-2022 Timm Fitschen <t.fitschen@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# + +""" +Collection of the central classes of the LinkAhead client, namely the Entity class +and all of its subclasses and the Container class which is used to carry out +transactions. 
+ +All additional classes are either important for the entities or the +transactions. +""" + +from __future__ import annotations # Can be removed with 3.10. +from __future__ import print_function, unicode_literals + +import re +import sys +from builtins import str +from copy import deepcopy +from functools import cmp_to_key +from hashlib import sha512 +from os import listdir +from os.path import isdir +from random import randint +from tempfile import NamedTemporaryFile +from typing import Any, Optional +from warnings import warn + +from lxml import etree + +from ..configuration import get_config +from ..connection.connection import get_connection +from ..connection.encode import MultipartParam, multipart_encode +from ..exceptions import (AmbiguousEntityError, AuthorizationError, + ConsistencyError, EmptyUniqueQueryError, + EntityDoesNotExistError, EntityError, + EntityHasNoDatatypeError, HTTPURITooLongError, + LinkAheadConnectionError, LinkAheadException, + MismatchingEntitiesError, PagingConsistencyError, + QueryNotUniqueError, TransactionError, + UniqueNamesError, UnqualifiedParentsError, + UnqualifiedPropertiesError) +from .datatype import (BOOLEAN, DATETIME, DOUBLE, INTEGER, TEXT, + is_list_datatype, is_reference) +from .state import State +from .timezone import TimeZone +from .utils import uuid, xml2str +from .versioning import Version + +_ENTITY_URI_SEGMENT = "Entity" + +# importances/inheritance +OBLIGATORY = "OBLIGATORY" +SUGGESTED = "SUGGESTED" +RECOMMENDED = "RECOMMENDED" +FIX = "FIX" +ALL = "ALL" +NONE = "NONE" + + +SPECIAL_ATTRIBUTES = ["name", "role", "datatype", "description", + "id", "path", "checksum", "size"] + + +class Entity: + + """Entity is a generic LinkAhead object. + + The majority of all methods of the derived classes (e.g. Record, + RecordType, Property ...) are defined here, e.g. add_property, + add_parent, retrieve ... 
Each entity may have some attributes (id, + name, description, ...), a set of properties, a set of parent + entities and a set of messages which are generated through the + processing in the client library or the server, or which can be used + by the user to control several server-side plug-ins. + """ + + def __init__(self, name=None, id=None, description=None, # @ReservedAssignment + datatype=None, value=None, **kwargs): + self.__role = kwargs["role"] if "role" in kwargs else None + self._checksum = None + self._size = None + self._upload = None + # If an entity is used (e.g. as parent), it is wrapped instead of being used directly. + # see Entity._wrap() + self._wrapped_entity = None + self._version = None + self._cuid = None + self._flags = dict() + self.__value = None + self.__datatype = None + self.datatype = datatype + self.value = value + self.messages = Messages() + self.properties = _Properties() + self.parents = _ParentList() + self.path = None + self.file = None + self.unit = None + self.acl = None + self.permissions = None + self.is_valid = lambda: False + self.is_deleted = lambda: False + self.name = name + self.description = description + self.id = id + self.state = None + + def copy(self): + """ + Return a copy of entity. + + If deep == True return a deep copy, recursively copying all sub entities. + + Standard properties are copied using add_property. + Special attributes, as defined by the global variable SPECIAL_ATTRIBUTES and additionaly + the "value" are copied using setattr. + """ + if self.role == "File": + new = File() + elif self.role == "Property": + new = Property() + elif self.role == "RecordType": + new = RecordType() + elif self.role == "Record": + new = Record() + elif self.role == "Entity": + new = Entity() + else: + raise RuntimeError("Unkonwn role.") + + # Copy special attributes: + # TODO: this might rise an exception when copying + # special file attributes like checksum and size. 
+ for attribute in SPECIAL_ATTRIBUTES + ["value"]: + val = getattr(self, attribute) + if val is not None: + setattr(new, attribute, val) + + # Copy parents: + for p in self.parents: + new.add_parent(p) + + # Copy properties: + for p in self.properties: + new.add_property(p, importance=self.get_importance(p)) + + return new + + @property + def version(self): + if self._version is not None or self._wrapped_entity is None: + return self._version + + return self._wrapped_entity.version + + @version.setter + def version(self, version): + self._version = version + + @property + def role(self): + return self.__role + + @role.setter + def role(self, role): + if role is not None and role.lower() == "entity": + self.__role = None + else: + self.__role = role + + @property + def size(self): + if self._size is not None: + return int(self._size) + + if self._wrapped_entity is None: + return None + + return self._wrapped_entity.size + + @property + def id(self): + if self.__id is not None: + return self.__id + + if self._wrapped_entity is None: + return None + + return self._wrapped_entity.id + + @id.setter + def id(self, new_id): + if new_id is not None: + self.__id = int(new_id) + else: + self.__id = None + + @property + def name(self): + if self.__name is not None or self._wrapped_entity is None: + return self.__name + + return self._wrapped_entity.name + + @name.setter + def name(self, new_name): + self.__name = new_name + + @property + def datatype(self): + if self.__datatype is not None or self._wrapped_entity is None: + return self.__datatype + + return self._wrapped_entity.datatype + + @datatype.setter + def datatype(self, new_type): + # re-parse value + self.__value = _parse_value(new_type, self.__value) + self.__datatype = new_type + + @property + def description(self): + if self.__description is not None or self._wrapped_entity is None: + return self.__description + + return self._wrapped_entity.description + + @property + def checksum(self): + return self._checksum + 
+ @description.setter + def description(self, new_description): + self.__description = new_description + + @property + def unit(self): + if self.__unit is not None or self._wrapped_entity is None: + return self.__unit + + return self._wrapped_entity.unit + + @unit.setter + def unit(self, new_unit): + self.__unit = new_unit + + @property + def value(self): + if self.__value is not None or self._wrapped_entity is None: + return self.__value + + return self._wrapped_entity.value + + @value.setter + def value(self, new_value): + self.__value = _parse_value(self.datatype, new_value) + + @property + def path(self): + if self.__path is not None or self._wrapped_entity is None: + return self.__path + + return self._wrapped_entity.path + + @path.setter + def path(self, new_path): + self.__path = new_path + + @property + def thumbnail(self): + if self.__thumbnail is not None or self._wrapped_entity is None: + return self.__thumbnail + + return self._wrapped_entity.thumbnail + + @thumbnail.setter + def thumbnail(self, new_thumbnail): + self.__thumbnail = new_thumbnail + + @property + def file(self): + if self.__file is not None or self._wrapped_entity is None: + return self.__file + + return self._wrapped_entity.file + + @file.setter + def file(self, new_file): + self.__file = new_file + + @property + def pickup(self): + if self.__pickup is not None or self._wrapped_entity is None: + return self.__pickup + + return self._wrapped_entity.pickup + + @pickup.setter + def pickup(self, new_pickup): + self.__pickup = new_pickup + + def grant(self, realm=None, username=None, role=None, + permission=None, priority=False, revoke_denial=True): + """Grant a permission to a user or role for this entity. + + You must specify either only the username and the realm, or only the + role. + + By default a previously existing denial rule would be revoked, because + otherwise this grant wouldn't have any effect. However, for keeping + contradicting rules pass revoke_denial=False. 
+ + Parameters + ---------- + permission: str + The permission to be granted. + username : str, optional + The username. Exactly one is required, either the `username` or the + `role`. + realm: str, optional + The user's realm. Required when username is not None. + role: str, optional + The role (as in Role-Based Access Control). Exactly one is + required, either the `username` or the `role`. + priority: bool, default False + Whether this permission is granted with priority over non-priority + rules. + revoke_denial: bool, default True + Whether a contradicting denial (with same priority flag) in this + ACL will be revoked. + """ + # @review Florian Spreckelsen 2022-03-17 + self.acl.grant(realm=realm, username=username, role=role, + permission=permission, priority=priority, + revoke_denial=revoke_denial) + + def deny(self, realm=None, username=None, role=None, + permission=None, priority=False, revoke_grant=True): + """Deny a permission to a user or role for this entity. + + You must specify either only the username and the realm, or only the + role. + + By default a previously existing grant rule would be revoked, because + otherwise this denial would override the grant rules anyways. However, + for keeping contradicting rules pass revoke_grant=False. + + Parameters + ---------- + permission: str + The permission to be denied. + username : str, optional + The username. Exactly one is required, either the `username` or the + `role`. + realm: str, optional + The user's realm. Required when username is not None. + role: str, optional + The role (as in Role-Based Access Control). Exactly one is + required, either the `username` or the `role`. + priority: bool, default False + Whether this permission is denied with priority over non-priority + rules. + revoke_grant: bool, default True + Whether a contradicting grant (with same priority flag) in this + ACL will be revoked. 
+ """ + # @review Florian Spreckelsen 2022-03-17 + self.acl.deny(realm=realm, username=username, role=role, + permission=permission, priority=priority, + revoke_grant=revoke_grant) + + def revoke_denial(self, realm=None, username=None, + role=None, permission=None, priority=False): + self.acl.revoke_denial( + realm=realm, + username=username, + role=role, + permission=permission, + priority=priority) + + def revoke_grant(self, realm=None, username=None, + role=None, permission=None, priority=False): + self.acl.revoke_grant( + realm=realm, + username=username, + role=role, + permission=permission, + priority=priority) + + def is_permitted(self, permission, role=None): + if role is None: + # pylint: disable=unsupported-membership-test + + return permission in self.permissions + else: + self.acl.is_permitted(permission=permission) + + def get_all_messages(self): + ret = Messages() + ret.append(self.messages) + + for p in self.properties: + ret.extend(p.get_all_messages()) + + for p in self.parents: + ret.extend(p.get_all_messages()) + + return ret + + def clear_server_messages(self): + self.messages.clear_server_messages() + + for p in self.properties: + p.clear_server_messages() + + for p in self.parents: + p.clear_server_messages() + + return self + + def get_importance(self, property): # @ReservedAssignment + """Get the importance of a given property regarding this entity.""" + + if self.properties is not None: + return self.properties.get_importance(property) + + def remove_property(self, property): # @ReservedAssignment + self.properties.remove(property) + + return self + + def remove_value_from_property(self, property_name: str, value: Any, + remove_if_empty_afterwards: Optional[bool] = True): + """Remove a value from a property given by name. + + Do nothing if this entity does not have a property of this + ``property_name`` or if the property value is different of the given + ``value``. 
By default, the property is removed from this entity if it + becomes empty (i.e., value=None) through removal of the value. This + behavior can be changed by setting ``remove_if_empty_afterwards`` to + ``False`` in which case the property remains. + + Notes + ----- + If the property value is a list and the value to be removed occurs more + than once in this list, only its first occurrance is deleted (similar + to the behavior of Python's ``list.remove()``.) + + If the property was empty (prop.value == None) before, the property is + not removed afterwards even if ``remove_if_empty_afterwards`` is set to + ``True``. Rationale: the property being empty is not an effect of + calling this function. + + Parameters + ---------- + property_name : str + Name of the property from which the ``value`` will be removed. + + value + Value that is to be removed. + + remove_if_empty_afterwards : bool, optional + Whether the property shall be removed from this entity if it is + emptied by removing the ``value``. Default is ``True``. + + Returns + ------- + self + This entity. 
+ + """ + + if self.get_property(property_name) is None: + return self + if self.get_property(property_name).value is None: + remove_if_empty_afterwards = False + empty_afterwards = False + if isinstance(self.get_property(property_name).value, list): + if value in self.get_property(property_name).value: + self.get_property(property_name).value.remove(value) + if self.get_property(property_name).value == []: + self.get_property(property_name).value = None + empty_afterwards = True + elif self.get_property(property_name).value == value: + self.get_property(property_name).value = None + empty_afterwards = True + if remove_if_empty_afterwards and empty_afterwards: + self.remove_property(property_name) + + return self + + def remove_parent(self, parent): + self.parents.remove(parent) + + return self + + def add_property(self, property=None, value=None, id=None, name=None, description=None, datatype=None, + unit=None, importance=None, inheritance=None): # @ReservedAssignment + """Add a property to this entity. + + The first parameter is meant to identify the property entity either via + its id or name, or by providing the corresponding ``Entity`` Python + object. The second parameter is the value of the new property. Any other + named parameter may be passed by means of the keywwords. Accepted + keywords are: id, name, description, importance, inheritance, datatype, + and unit. + + Notes + ----- + If you want to add a property to an already existing entity, the + property ``id`` of that property needs to be specified before you send + the updated entity to the server. + + Parameters + ---------- + property : int, str, Entity, optional + An identifier for the property to be added, either its name, its id, + or the corresponding Entity Python object. If ``None``, either the + `name` or the `id` argument have to be specified explicitly. Default + is ``None``. + value : int, str, bool, datetime, Entity, or list of these types, optional + The value of the new property. 
In case of a reference to another + entity, this value may be the referenced entities id or the + ``Entity`` as a Python object. Default is None. + id : int, optional + Id of the property, by default None + name : str, optional + Name of the property, by default None + description : str, optional + Description of the property, by default None + datatype : str, optional + Datatype of the property, by default None + unit : str, optional + Unit of the property, by default None + importance :str, optional + Importance of the property, by default None + inheritance : str, optional + Inheritance of the property, by default None + + Returns + ------- + Entity + This Entity object to which the new property has been added. + + Warns + ----- + UserWarning + If the first parameter is None then id or name must be defined and not be None. + UserWarning + If the first parameter is an integer then it is interpreted as the id and id must be + undefined or None. + UserWarning + If the first parameter is not None and neither an instance of Entity nor an integer it is + interpreted as the name and name must be undefined or None. + + Raises + ------ + ValueError: + If you try to add an ``Entity`` object with File or Record role (or, + equivalently, a ``File`` or ``Record`` object) as a property, a + ``ValueError`` is raised. 
+ + Examples + -------- + Add a simple integer property with the name ``TestProp`` and the value + 27 to a Record: + + >>> import linkahead as db + >>> rec = db.Record(name="TestRec").add_parent(name="TestType") + >>> rec.add_property("TestProp", value=27) # specified by name, you could equally use the property's id if it is known + + You can also use the Python object: + + >>> prop = db.Property(name="TestProp", datatype=db.INTEGER) + >>> rec.add_property(prop, value=27) # specified via the Python object + + In case of updating an existing Record, the Property needs to be + specified by id: + + >>> rec = db.Record(name="TestRec").retrieve() + >>> prop2 = db.Property(name="OtherTestProp").retrieve() + >>> rec.add_property(id=prop2.id, value="My new value") + >>> rec.update() + + Let's look at the more advanced example of adding a list of integers as + value of the above integer ``TestProp``: + + >>> rec.add_property("TestProp", value=[27,28,29], datatype=db.LIST(db.INTEGER)) + + Note that since `TestProp` is a scalar integer Property, the datatype + `LIST<INTEGER>` has to be specified explicitly. + + Finally, we can also add reference properties, specified by the RecordType of the referenced entity. + + >>> ref_rec = db.Record(name="ReferencedRecord").add_parent(name="OtherRT") + >>> rec.add_property(name="OtherRT", value=ref_rec) # or value=ref_rec.id if ref_rec has one set by the server + + See more on adding properties and inserting data in + https://docs.indiscale.com/caosdb-pylib/tutorials/Data-Insertion.html. + + """ + + pid = id + abstract_property = None + + if isinstance(property, Entity): + if property.role is not None and property.role.lower() in ["record", "file"]: + raise ValueError("The property parameter is a {0}. This " + "is very unusual and probably not what you " + "want. 
Otherwise, construct a property from " + "a {0} using the Property class and add " + "that to this entity.".format(property.role)) + abstract_property = property + elif isinstance(property, int): + if pid is not None: + raise UserWarning("The first parameter was an integer which would normally be interpreted as the id of the property which is to be added. But you have also specified a parameter 'id' in the method call. This is ambiguous and cannot be processed.") + pid = property + id = pid + elif property is not None: + if name is not None: + raise UserWarning("The first parameter was neither an instance of Entity nor an integer. Therefore the string representation of your first parameter would normally be interpreted name of the property which is to be added. But you have also specified a parameter 'name' in the method call. This is ambiguous and cannot be processed.") + name = str(property) + + if property is None and name is None and pid is None: + raise UserWarning( + "This method expects you to pass at least an entity, a name or an id.") + + new_property = Property(name=name, id=id, description=description, datatype=datatype, + value=value, unit=unit) + + if abstract_property is not None: + new_property._wrap(abstract_property) + + # FIXME: this really necessary? + + if new_property.datatype is None and isinstance( + property, (RecordType, Record, File)): + new_property.datatype = property + new_property.value = value + + self.properties.append( + property=new_property, importance=importance, inheritance=inheritance) + + return self + + def add_message(self, msg=None, type=None, code=None, # @ReservedAssignment + description=None, body=None): + """Add a message (msg) to this entity. If and only if no msg is given + this method will created a new message from the parameters type, code, + description, and body. + + @param msg: The message to be added to this entity. + @param type: The type of the message to be added. 
+ @param code: The code of the message to be added. + @param description: The description of the message to be added. + @param body: The body of the message to be added. + """ + + if msg is not None: + pass + else: + msg = Message(description=description, type=type, code=code, + body=body) + self.messages.append(msg) + + return self + + def add_parent(self, parent=None, id=None, name=None, inheritance=None): # @ReservedAssignment + """Add a parent to this entity. + + Parameters + ---------- + parent : Entity or int or str or None + The parent entity, either specified by the Entity object + itself, or its id or its name. Default is None. + id : int + Integer id of the parent entity. Ignored if `parent` + is not None. + name : str + Name of the parent entity. Ignored if `parent is not + none`. + inheritance : str + One of ``obligatory``, ``recommended``, ``suggested``, or ``fix``. Specifies the + minimum importance which parent properties need to have to be inherited by this + entity. If no `inheritance` is given, no properties will be inherited by the child. + This parameter is case-insensitive. + + Notes + ----- + Note that the behaviour of the `inheritance` argument currently has not + yet been specified when assigning parents to Records, it only works for + inheritance of RecordTypes (and Properties). For more information, it is + recommended to look into the :ref:`data insertion + tutorial<tutorial-inheritance-properties>`. + + Raises + ------ + UserWarning + If neither a `parent` parameter, nor the `id`, nor `name` + parameter is passed to this method. 
+ + """ + + pid = id + parent_entity = None + + if isinstance(parent, Entity): + parent_entity = parent + elif isinstance(parent, int): + pid = parent + elif parent is not None: + name = str(parent) + + if pid is None and name is None and parent_entity is None: + raise UserWarning( + "This method expects you to pass at least an entity, a name or an id.") + + addp = Parent(id=pid, name=name, inheritance=inheritance) + + if parent_entity is not None: + addp._wrap(parent_entity) + self.parents.append(addp) + + return self + + def has_parent(self, parent: Entity, recursive: bool = True, retrieve: bool = True, + check_name: bool = True, check_id: bool = False): + """Check if this entity has a given parent. + + If 'check_name' and 'check_id' are both False, test for identity + on the Python level. Otherwise use the name and/or ID for the + check. Note that, if checked, name or ID should not be None, + lest the check fail. + +Parameters +---------- + +parent: Entity + Check for this parent. + +recursive: bool, optional + Whether to check recursively. + +check_name: bool, optional + Whether to use the name for ancestry check. + +check_id: bool, optional + Whether to use the ID for ancestry check. + +retrieve: bool, optional + If False, do not retrieve parents from the server. + +Returns +------- +out: bool + True if ``parent`` is a true parent, False otherwise. 
+""" + + if recursive: + parents = self.get_parents_recursively(retrieve=retrieve) + else: + if retrieve: + parents = [pp.retrieve()._wrapped_entity for pp in self.parents] + else: + parents = [pp._wrapped_entity for pp in self.parents] + + if not (check_name or check_id): + return parent in parents + + name_result = ( + not check_name or + (parent.name is not None and + parent.name in [pp.name for pp in parents])) + id_result = ( + not check_id or + (parent.id is not None and + parent.id in [pp.id for pp in parents])) + + return name_result and id_result + + def get_parents(self): + """Get all parents of this entity. + + @return: _ParentList(list) + """ + + return self.parents + + def get_parents_recursively(self, retrieve: bool = True): + """Get all ancestors of this entity. + +Parameters +---------- + +retrieve: bool, optional + If False, do not retrieve parents from the server. + +Returns +------- +out: List[Entity] + The parents of this Entity +""" + + all_parents = [] + self._get_parent_recursively(all_parents, retrieve=retrieve) + + return all_parents + + def _get_parent_recursively(self, all_parents: list, retrieve: bool = True): + """Get all ancestors with a little helper. + + As a side effect of this method, the ancestors are added to + all_parents. + + @param all_parents: list, The added parents so far. + + @return: None, but see side effects. + """ + + for parent in self.parents: + # TODO: + # Comment on _wrap and _wrapped_entity + # Currently, I (henrik) do not why the wrapping is necessary (and it is not + # documented). However, the following illustrates, why I think, it is a bad idea. + # First you add a parent with rec.add_parent(parent), but then you cannot access + # attributes of parent when you use rec.parents[0] for example becasue you do not get + # the same object but a wrapping object and you need to know that you only get the + # original by accessing the private (!) _wrapped_entity object. 
+ w_parent = parent._wrapped_entity + if retrieve: + parent.retrieve() + for next_parent in parent.parents: + w_parent.add_parent(next_parent) + + if (w_parent.id, w_parent.name) not in [ + (all_p.id, all_p.name) for all_p in all_parents]: + all_parents.append(w_parent) + w_parent._get_parent_recursively(all_parents, retrieve=retrieve) + + def get_parent(self, key): + """Return the first parent matching the key or None if no match exists. + + Parameters + --------- + key : int or Enity or str + The id, Entity, or name of the parent that should be + returned. If an Entity is given, its id or its name is + used to find a matching parent. + + Returns + ------- + parent : Entity + The first parent of this entity that matches the given id, + entity, or name. + + """ + + if isinstance(key, int): + for p in self.parents: + if p.id is not None and int(p.id) == int(key): + return p + elif isinstance(key, Entity): + if key.id is not None: + # first try by id + found = self.get_parent(int(key.id)) + + if found is not None: + return found + # otherwise by name + + return self.get_parent(key.name) + else: + for p in self.parents: + if (p.name is not None + and str(p.name).lower() == str(key).lower()): + + return p + + return None + + def get_properties(self): + """Get all properties of this entity. + + @return: _Properties(list) + """ + + return self.properties + + def get_property(self, pattern): + """ Return the first matching property or None. + + Parameters + ---------- + pattern : str or int or Entity + The name or id to look for (case-insensitive) or an Entity where + the name or id is used to match the properites of this instance. + + Returns + ------- + property : Property + The first Property of this Entity with a matching name or id. 
+ + """ + # entity given + + if (hasattr(pattern, "name") or hasattr(pattern, "id")): + # only return if a result was found, otherwise use id + + if (hasattr(pattern, "name") and pattern.name is not None + and self.get_property(pattern.name) is not None): + + return self.get_property(pattern.name) + + if hasattr(pattern, "id") and pattern.id is not None: + return self.get_property(pattern.id) + + # int given + elif isinstance(pattern, int): + for p in self.properties: + if p.id is not None and int(p.id) == int(pattern): + return p + # str given + elif isinstance(pattern, str): + for p in self.properties: + if (p.name is not None + and str(p.name).lower() == str(pattern).lower()): + + return p + else: + raise ValueError("argument should be entity, int , string") + + return None + + def _get_value_for_selector(self, selector): + """return the value described by the selector + + A selector is a list or a tuple of strings describing a path in an + entity tree with self as root. The last selector may be a special one + like unit or name. 
+ + See also get_property_values() + """ + SPECIAL_SELECTORS = ["unit", "value", "description", "id", "name"] + + if not isinstance(selector, (tuple, list)): + selector = [selector] + + ref = self + + # there are some special selectors which can be applied to the + # final element; if such a special selector exists we split it + # from the list + + if selector[-1].lower() in SPECIAL_SELECTORS: + special_selector = selector[-1] + selector = selector[:-1] + else: + special_selector = None + + # iterating through the entity tree according to the selector + + for subselector in selector: + # selector does not match the structure, we cannot get a + # property of non-entity + + if not isinstance(ref, Entity): + return None + + prop = ref.get_property(subselector) + + # selector does not match the structure, we did not get a + # property + + if prop is None: + return None + + # if the property is a reference, we are interested in the + # corresponding entities attributes + + if isinstance(prop.value, Entity): + ref = prop.value + + # otherwise in the attributes of the property + else: + ref = prop + + # if we saved a special selector before, apply it + + if special_selector is None: + return prop.value + else: + return getattr(ref, special_selector.lower()) + + def get_property_values(self, *selectors): + """ Return a tuple with the values described by the given selectors. + + This represents an entity's properties as if it was a row of a table + with the given columns. + + If the elements of the selectors parameter are tuples, they will return + the properties of the referenced entity, if present. E.g. ("window", + "height") will return the value of the height property of the + referenced window entity. + + The tuple's values correspond to the order of selectors parameter. + + The tuple contains None for all values that are not available in the + entity. That does not necessarily mean, that the values are not stored + in the database (e.g. 
        if a single entity was retrieved without
        referenced entities).

        Parameters
        ----------
        *selectors : str or tuple of str
            Each selector is a list or tuple of property names, e.g. `"height",
            "width"`.

        Returns
        -------
        row : tuple
            A row-like representation of the entity's properties.
        """
        row = tuple()

        for selector in selectors:
            val = self._get_value_for_selector(selector)

            # reference values are flattened to their id (or name as fallback)
            if isinstance(val, Entity):
                val = val.id if val.id is not None else val.name
            row += (val,)

        return row

    def get_messages(self):
        """Get all messages of this entity.

        @return: Messages(list)
        """

        return self.messages

    def get_warnings(self):
        """Get all warning messages of this entity.

        @return Messages(list): Warning messages.
        """
        ret = Messages()

        for m in self.messages:
            if m.type.lower() == "warning":
                ret.append(m)

        return ret

    def get_errors(self):
        """Get all error messages of this entity.

        Errors of the wrapped entity (if any) are included as well.

        @return Messages(list): Error messages.
        """
        ret = Messages()

        for m in self.messages:
            if m.type.lower() == "error":
                ret.append(m)

        if self._wrapped_entity is not None:
            ret.extend(self._wrapped_entity.get_errors())

        return ret

    def get_errors_deep(self, roots=None):
        """Get all error messages of this entity and all sub-entities /
        parents / properties.

        NOTE(review): despite the summary above, only parents are recursed
        into here — properties are not visited; confirm whether that is
        intended.

        @return A list of tuples. Tuple index 0 contains the error message
        and tuple index 1 contains the tree.
        """
        roots = [] if roots is None else roots
        result_list = list()
        ret_self = self.get_errors()
        result_list.extend([
            (m, roots) for m in ret_self])

        for parent in self.get_parents():
            result_list.extend(
                parent.get_errors_deep(
                    roots + [parent]))

        return result_list

    def has_errors(self):
        '''
        @return True: if and only if this entities has any error messages.
        '''

        for m in self.messages:
            if m.type.lower() == "error":
                return True

        return False

    def to_xml(self, xml=None, add_properties=ALL, local_serialization=False):
        """Generate an xml representation of this entity. If the parameter xml
        is given, all attributes, parents, properties, and messages of this
        entity will be added to it instead of creating a new element.

        Raise an error if xml is not a lxml.etree.Element

        @param xml: an xml element to which all attributes, parents,
            properties, and messages
            are to be added.
        @return: xml representation of this entity.
        """

        if xml is None:
            # use role as xml tag name, fall-back to "Entity"
            elem_tag = "Entity" if self.role is None else self.role
            xml = etree.Element(elem_tag)
        assert isinstance(xml, etree._Element)

        # unwrap wrapped entity

        if self._wrapped_entity is not None:
            xml = self._wrapped_entity.to_xml(xml, add_properties)

        if self.id is not None:
            xml.set("id", str(self.id))

        if self._cuid is not None:
            xml.set("cuid", str(self._cuid))

        if self.name is not None:
            xml.set("name", str(self.name))

        if self.description is not None:
            xml.set("description", str(self.description))

        if self.version is not None:
            xml.append(self.version.to_xml())

        if self.value is not None:
            # reference values are serialized via their id (or name)
            if isinstance(self.value, Entity):
                if self.value.id is not None:
                    xml.text = str(self.value.id)
                elif self.value.name is not None:
                    xml.text = str(self.value.name)
                else:
                    xml.text = str(self.value)
            elif isinstance(self.value, list):
                for v in self.value:
                    v_elem = etree.Element("Value")

                    if isinstance(v, Entity):
                        if v.id is not None:
                            v_elem.text = str(v.id)
                        elif v.name is not None:
                            v_elem.text = str(v.name)
                        else:
                            v_elem.text = str(v)
                    elif v == "":
                        # empty strings need a dedicated marker element
                        v_elem.append(etree.Element("EmptyString"))
                    elif v is None:
                        pass
                    else:
                        v_elem.text = str(v)
                    xml.append(v_elem)
            elif self.value == "":
                xml.append(etree.Element("EmptyString"))
            elif str(self.value) == "nan":
                xml.text = "NaN"
            else:
                xml.text = str(self.value)

        if self.datatype is not None:
            if isinstance(self.datatype, Entity):
                if self.datatype.id is not None:
                    xml.set("datatype", str(self.datatype.id))
                elif self.datatype.name is not None:
                    xml.set("datatype", str(self.datatype.name))
                else:
                    xml.set("datatype", str(self.datatype))
            else:
                xml.set("datatype", str(self.datatype))

        if self.path is not None:
            xml.set("path", self.path)

        if self.file is not None and local_serialization:
            xml.set("file", self.file)

        if self._checksum is not None:
            xml.set("checksum", self._checksum)

        if self.size is not None:
            xml.set("size", str(self.size))

        if self.unit is not None:
            xml.set("unit", str(self.unit))

        if self.messages is not None:
            self.messages.to_xml(xml)

        if self.parents is not None:
            self.parents.to_xml(xml)

        if self.properties is not None:
            self.properties.to_xml(xml, add_properties)

        # flags are serialized as a single comma-separated attribute
        if len(self._flags) > 0:
            flagattr = ""

            for key in self._flags.keys():
                flag = self._flags[key]

                if flag is not None and flag != "":
                    flagattr += str(key) + ":" + str(flag) + ","
                else:
                    flagattr += str(key) + ","
            xml.set("flag", flagattr)

        if self.acl is not None:
            xml.append(self.acl.to_xml())

        if self.state is not None:
            xml.append(self.state.to_xml())

        return xml

    @staticmethod
    def _from_xml(entity, elem):
        """Parse a single string representation of an xml element to an entity.
        @param entity: the entity
        @param elem: the xml element
        """

        if isinstance(entity, Entity):
            entity.role = elem.tag
        entity._cuid = elem.get("cuid")
        entity.id = elem.get("id")  # @ReservedAssignment
        entity.name = elem.get("name")
        entity.description = elem.get("description")
        entity.path = elem.get("path")
        entity._checksum = elem.get("checksum")
        entity._size = elem.get("size")
        entity.datatype = elem.get("datatype")  # @ReservedAssignment
        entity.unit = elem.get("unit")
        entity.file = elem.get("file")

        if hasattr(entity, "affiliation"):
            entity.affiliation = elem.get("affiliation")

        vals = list()

        for celem in elem:

            child = _parse_single_xml_element(celem)

            if isinstance(child, Property):
                entity.properties.append(property=child,
                                         importance=celem.get("importance"),
                                         inheritance=None)
            elif isinstance(child, Parent):
                entity.add_parent(child)
            elif isinstance(child, ACL):
                entity.acl = child
            elif isinstance(child, Permissions):
                entity.permissions = child
            elif isinstance(child, Message):
                entity.add_message(child)
            elif isinstance(child, Version):
                entity.version = child
            elif isinstance(child, State):
                entity.state = child
            elif child is None or hasattr(child, "encode"):
                # plain text (or empty) children are collected as values
                vals.append(child)
            elif isinstance(child, Entity):
                vals.append(child)
            else:
                raise TypeError(
                    'Child was neither a Property, nor a Parent, nor a Message.\
                    Was ' + str(type(child)) + "\n" + str(child))

        # add VALUE
        value = None

        if vals:
            # The value[s] have been inside a <Value> tag.
            value = vals
        elif elem.text is not None and elem.text.strip() != "":
            value = elem.text.strip()

        try:
            entity.value = value
        except ValueError:
            # circumvent the parsing.
            # NOTE: name-mangles to _Entity__value, bypassing the setter.
            entity.__value = value

        return entity

    def __repr__(self):
        return xml2str(self.to_xml())

    def retrieve_acl(self):
        # fetch only the ACL of this entity from the server
        self.acl = Entity(name=self.name, id=self.id).retrieve(
            flags={"ACL": None}).acl

    def update_acl(self):
        """Update this entity's ACL on the server.

        The entity is looked up by id, or — if no id is present — by name;
        a name lookup must match exactly one entity.
        """
        if self.id is None:
            c = Container().retrieve(query=self.name, sync=False)

            if len(c) == 1:
                e = c[0]
            elif len(c) == 0:
                ee = EntityDoesNotExistError(
                    "The entity to be updated does not exist on the server.",
                    entity=self
                )
                raise TransactionError(ee)
            else:
                ae = AmbiguousEntityError(
                    "Could not determine the desired Entity which is to be updated by its name.",
                    entity=self
                )
                raise TransactionError(ae)
        else:
            e = Container().retrieve(query=self.id, sync=False)[0]
        e.acl = ACL(self.acl.to_xml())
        e.update()

        return e

    def delete(self, raise_exception_on_error=True):
        return Container().append(self).delete(
            raise_exception_on_error=raise_exception_on_error)[0]

    def retrieve(self, unique=True, raise_exception_on_error=True, flags=None):
        """Retrieve this entity identified via its id if present and via its
        name otherwise. Any locally already existing attributes (name,
        description, ...) will be preserved. Any such properties and parents
        will be synchronized as well. They will not be overridden. This method
        returns a Container containing the this entity.

        Note: If only a name is given this could lead to ambiguities. Usually
        this would raise a LinkAheadException. Set the flag 'unique' to False if
        this Exception should be suppressed. If unique is False this method
        returns a Container object which carries the returned entities. They are
        distinct from this one. This entity will no be changed somehow.

        @param unique=True: flag to suppress the ambiguity exception.

        @return
            Container with the returned entities or single entity if and only
            if unique was True and no exception was raised.

        """

        if unique:
            c = Container().append(self).retrieve(
                unique=unique, raise_exception_on_error=raise_exception_on_error, flags=flags)

            if len(c) == 1:
                c[0].messages.extend(c.messages)

                return c[0]

            raise QueryNotUniqueError("This retrieval was not unique!!!")

        return Container().append(self).retrieve(
            unique=unique, raise_exception_on_error=raise_exception_on_error, flags=flags)

    def insert(self, raise_exception_on_error=True, unique=True,
               sync=True, strict=False, flags=None):
        """Insert this entity into a LinkAhead server. A successful insertion will
        generate a new persistent ID for this entity. This entity can be
        identified, retrieved, updated, and deleted via this ID until it has
        been deleted.

        If the insertion fails, a LinkAheadException will be raised. The server will have returned at
        least one error-message describing the reason why it failed in that case (call
        <this_entity>.get_all_messages() in order to get these error-messages).

        Some insertions might cause warning-messages on the server-side, but the entities are inserted
        anyway. Set the flag 'strict' to True in order to force the server to take all warnings as errors.
        This prevents the server from inserting this entity if any warning occurs.

        Parameters
        ----------
        strict : bool, optional
            Flag for strict mode. Default is False.
        raise_exception_on_error : bool, optional
            Flag to raise an exception when an error occurs. Default is True.
        unique : bool, optional
            Flag to only allow insertion of elements with unique names. Default
            is True.
        flags : dict, optional
            A dictionary of flags to be send with the insertion. Default is
            None.

        """

        return Container().append(self).insert(
            strict=strict,
            raise_exception_on_error=raise_exception_on_error,
            unique=unique,
            sync=sync,
            flags=flags)[0]

    def update(self, strict=False, raise_exception_on_error=True,
               unique=True, flags=None, sync=True):
        """Update this entity.
        There are two possible work-flows to perform this update:

        First:
            1) retrieve an entity
            2) do changes
            3) call update method

        Second:
            1) construct entity with id
            2) call update method.

        For slight changes the second one it is more comfortable. Furthermore, it is possible to stay
        off-line until calling the update method. The name, description, unit, datatype, path,
        and value of an entity may be changed. Additionally, properties, parents and messages may be added.

        However, the first one is more powerful: It is possible to delete and change properties, parents
        and attributes, which is not possible via the second one for internal reasons (which are reasons
        of definiteness).

        If the update fails, a LinkAheadException will be raised. The server will have returned at
        least one error message describing the reason why it failed in that case (call
        <this_entity>.get_all_messages() in order to get these error-messages).

        Some updates might cause warning messages on the server-side, but the updates are performed
        anyway. Set flag 'strict' to True in order to force the server to take all warnings as errors.
        This prevents the server from updating this entity if any warnings occur.

        @param strict=False: Flag for strict mode.
        """

        return Container().append(self).update(
            strict=strict,
            sync=sync,
            raise_exception_on_error=raise_exception_on_error,
            unique=unique,
            flags=flags)[0]

    def _wrap(self, entity):
        """
        When entity shall be used as parent or property it is not added to the corresponding list
        (such as the parent list) directly, but another Entity object is created and the original
        Entity is wrapped using this function
        TODO: document here and in dev docs why this is done.
        """
        self._wrapped_entity = entity

        return self

    def set_flag(self, key, value=None):
        # flags are serialized into the "flag" xml attribute by to_xml()
        self._flags[key] = value

        return self


def _parse_value(datatype, value):
    """Parse the value (from XML input) according to the given datatype
    """

    # Simple values
    if value is None:
        return value

    if datatype is None:
        return value

    if datatype == DOUBLE:
        return float(value)

    if datatype == INTEGER:
        return int(str(value))

    if datatype == BOOLEAN:
        if str(value).lower() == "true":
            return True
        elif str(value).lower() == "false":
            return False
        else:
            raise ValueError("Boolean value was {}.".format(value))

    # Datetime and text are returned as-is
    if datatype in [DATETIME, TEXT]:
        if isinstance(value, str):
            return value

    # deal with collections (e.g. "LIST<TEXT>")
    if isinstance(datatype, str):
        matcher = re.compile(r"^(?P<col>[^<]+)<(?P<dt>[^>]+)>$")
        m = matcher.match(datatype)

        if m:
            col = m.group("col")
            dt = m.group("dt")

            if col == "LIST":
                ret = list()
            else:
                return value

            if hasattr(value, "__iter__") and not isinstance(value, str):
                # recursively parse each element with the inner datatype
                for v in value:
                    ret.append(_parse_value(dt, v))
            else:
                # put a single value into a list since the datatype says so.
                ret.append(_parse_value(dt, value))

            return ret

    # This is for a special case, where the xml parser could not differentiate
    # between single values and lists with one element. As
    if hasattr(value, "__len__") and len(value) == 1:
        return _parse_value(datatype, value[0])

    # deal with references
    if isinstance(value, Entity):
        return value

    if isinstance(value, str) and "@" in value:
        # probably this is a versioned reference

        return str(value)
    else:
        # for unversioned references
        try:
            return int(value)
        except ValueError:
            # reference via name

            return str(value)
        except TypeError as te:
            # deal with invalid XML: List of values without appropriate datatype
            if isinstance(value, list):
                raise TypeError(
                    "Invalid datatype: List valued properties must be announced by "
                    "the datatype.\n"
                    f"Datatype: {datatype}\nvalue: {value}")
            else:
                # Everything else that's not related to wrong list assignments
                raise te


def _log_request(request, xml_body=None):
    # debug helper: prints the outgoing request when debugging is enabled
    if Container._debug() > 0:
        print("\n" + request)

        if xml_body is not None:
            print("======== Request body ========\n")
            print(xml2str(xml_body))
            print("\n==============================\n")


def _log_response(body):
    # debug helper: prints the raw response body when debugging is enabled
    if Container._debug() > 0:
        print("\n======== Response body ========\n")
        print(body.decode())
        print("\n===============================\n")


class QueryTemplate():

    def __init__(self, id=None, name=None, query=None, description=None):  # @ReservedAssignment

        self.id = (int(id) if id is not None else None)
        self.role = "QueryTemplate"
        self.name = name
        self.description = description
        self.query = query
        self._cuid = None
        self.value = None
        self.datatype = None
        self.messages = Messages()
        self.properties = None
        self.parents = None
        self.path = None
        self.file = None
        self._checksum = None
        self._size = None
        self._upload = None
        self.unit = None
        self.acl = None
        self.permissions = None
        self.is_valid = lambda: False
        self.is_deleted = lambda: False
        self.version = None
        self.state = None

    def retrieve(self, raise_exception_on_error=True, unique=True, sync=True,
                 flags=None):

        return Container().append(self).retrieve(
            raise_exception_on_error=raise_exception_on_error,
            unique=unique,
            sync=sync,
            flags=flags)[0]

    def insert(self, strict=True, raise_exception_on_error=True,
               unique=True, sync=True, flags=None):

        return Container().append(self).insert(
            strict=strict,
            raise_exception_on_error=raise_exception_on_error,
            unique=unique,
            sync=sync,
            flags=flags)[0]

    def update(self, strict=True, raise_exception_on_error=True,
               unique=True, sync=True, flags=None):

        return Container().append(self).update(
            strict=strict,
            raise_exception_on_error=raise_exception_on_error,
            unique=unique,
            sync=sync,
            flags=flags)[0]

    def delete(self, raise_exception_on_error=True):
        return Container().append(self).delete(
            raise_exception_on_error=raise_exception_on_error)[0]

    def __repr__(self):
        return xml2str(self.to_xml())

    def to_xml(self, xml=None):
        if xml is None:
            xml = etree.Element("QueryTemplate")

        if self.name is not None:
            xml.set("name", self.name)

        if self.id is not None:
            xml.set("id", str(self.id))

        if self.description is not None:
            xml.set("description", self.description)

        if self.version is not None:
            xml.append(self.version.to_xml())

        if self.query is not None:
            queryElem = etree.Element("Query")
            queryElem.text = self.query
            xml.append(queryElem)

        if self.messages is not None:
            self.messages.to_xml(xml)

        if self.acl is not None:
            xml.append(self.acl.to_xml())

        return xml

    @staticmethod
    def _from_xml(xml):
        # returns None for any tag that is not <QueryTemplate>
        if xml.tag.lower() == "querytemplate":
            q = QueryTemplate(name=xml.get("name"),
                              description=xml.get("description"), query=None)

            for e in xml:
                if e.tag.lower() == "query":
                    q.query = e.text
                else:
                    child = _parse_single_xml_element(e)

                    if isinstance(child, Message):
                        q.messages.append(child)
                    elif isinstance(child, ACL):
                        q.acl = child
                    elif isinstance(child, Version):
                        q.version = child
                    elif isinstance(child, Permissions):
                        q.permissions = child
            q.id = int(xml.get("id"))

            return q
        else:
            return None

    def clear_server_messages(self):
        self.messages.clear_server_messages()

    def get_parents(self):
        return []

    def get_properties(self):
        return []

    def has_id(self):
        return self.id is not None

    def get_errors(self):
        ret = Messages()

        for m in self.messages:
            if m.type.lower() == "error":
                ret.append(m)

        return ret

    def get_messages(self):
        return self.messages

    def has_errors(self):
        return len(self.get_errors()) > 0


class Parent(Entity):
    """The parent entities."""

    @property
    def affiliation(self):
        # fall back to the wrapped entity's affiliation if ours is unset
        if self.__affiliation is not None or self._wrapped_entity is None:
            return self.__affiliation
        elif hasattr(self._wrapped_entity, "affiliation"):
            return self._wrapped_entity.affiliation

        return

    @affiliation.setter
    def affiliation(self, affiliation):
        self.__affiliation = affiliation

    def __init__(self, id=None, name=None, description=None, inheritance=None):  # @ReservedAssignment
        Entity.__init__(self, id=id, name=name, description=description)

        if inheritance is not None:
            self.set_flag("inheritance", inheritance)
        self.__affiliation = None

    def to_xml(self, xml=None, add_properties=None):
        if xml is None:
            xml = etree.Element("Parent")

        return super().to_xml(xml=xml, add_properties=add_properties)


class _EntityWrapper(object):
    pass


class _ConcreteProperty(_EntityWrapper):
    pass


class Property(Entity):

    """LinkAhead's Property object."""

    def add_property(self, property=None, value=None, id=None, name=None, description=None, datatype=None,
                     unit=None, importance=FIX, inheritance=FIX):  # @ReservedAssignment
        """See ``Entity.add_property``."""

        return super().add_property(
            property=property, id=id, name=name, description=description, datatype=datatype,
            value=value, unit=unit, importance=importance, inheritance=inheritance)

    def add_parent(self, parent=None, id=None, name=None, inheritance=FIX):
        """Add a parent Entity to this Property.

        Parameters
        ----------
        parent : Entity or int or str or None
            The parent entity, either specified by the Entity object
            itself, or its id or its name. Default is None.
        id : int
            Integer id of the parent entity. Ignored if `parent`
            is not None.
        name : str
            Name of the parent entity. Ignored if `parent is not
            none`.
        inheritance : str, default: FIX
            One of ``obligatory``, ``recommended``, ``suggested``, or ``fix``. Specifies the
            minimum importance which parent properties need to have to be inherited by this
            entity. If no `inheritance` is given, no properties will be inherited by the child.
            This parameter is case-insensitive.

        See Also
        --------
        Entity.add_parent

        """

        return super(Property, self).add_parent(parent=parent, id=id, name=name, inheritance=inheritance)

    def __init__(self, name=None, id=None, description=None, datatype=None,
                 value=None, unit=None):
        Entity.__init__(self, id=id, name=name, description=description,
                        datatype=datatype, value=value, role="Property")
        self.unit = unit

    def to_xml(self, xml=None, add_properties=ALL):
        if xml is None:
            xml = etree.Element("Property")

        return super(Property, self).to_xml(xml, add_properties)

    def is_reference(self, server_retrieval=False):
        """Returns whether this Property is a reference

        Parameters
        ----------
        server_retrieval : bool, optional
            If True and the datatype is not set, the Property is retrieved from the server, by default False

        Returns
        -------
        bool, NoneType
            Returns whether this Property is a reference or None if a server call is needed to
            check correctly, but server_retrieval is set to False.
+ + """ + + if self.datatype is None: + + if not self.is_valid(): + # this is a workaround to prevent side effects + # since retrieve currently changes the object + + if server_retrieval: + tmp_prop = deepcopy(self) + """ + remove role to avoid unnessecary ValueError while + retrieving the Entity. + """ + tmp_prop.role = None + tmp_prop.retrieve() + + return tmp_prop.is_reference() + else: + return None + else: + # a valid property without datatype has to be an RT + + return True + else: + return is_reference(self.datatype) + + +class Message(object): + + def __init__(self, type=None, code=None, description=None, body=None): # @ReservedAssignment + self.description = description + self.type = type if type is not None else "Info" + self.code = int(code) if code is not None else None + self.body = body + + def to_xml(self, xml=None): + if xml is None: + xml = etree.Element(str(self.type)) + + if self.code is not None: + xml.set("code", str(self.code)) + + if self.description: + xml.set("description", str(self.description)) + + if self.body: + xml.text = str(self.body) + + return xml + + def __repr__(self): + return xml2str(self.to_xml()) + + def __eq__(self, obj): + if isinstance(obj, Message): + return self.type == obj.type and self.code == obj.code and self.description == obj.description + + return False + + def get_code(self): + warn(("get_code is deprecated and will be removed in future. 
" + "Use self.code instead."), DeprecationWarning) + return int(self.code) + + +class RecordType(Entity): + + """This class represents LinkAhead's RecordType entities.""" + + def add_property(self, property=None, value=None, id=None, name=None, description=None, datatype=None, + unit=None, importance=RECOMMENDED, inheritance=FIX): # @ReservedAssignment + """See ``Entity.add_property``.""" + + return super().add_property( + property=property, id=id, name=name, description=description, datatype=datatype, + value=value, unit=unit, importance=importance, inheritance=inheritance) + + def add_parent(self, parent=None, id=None, name=None, inheritance=OBLIGATORY): + """Add a parent to this RecordType + + Parameters + ---------- + parent : Entity or int or str or None, optional + The parent entity, either specified by the Entity object + itself, or its id or its name. Default is None. + Parameters + ---------- + parent : Entity or int or str or None + The parent entity, either specified by the Entity object + itself, or its id or its name. Default is None. + id : int + Integer id of the parent entity. Ignored if `parent` + is not None. + name : str + Name of the parent entity. Ignored if `parent is not + none`. + inheritance : str, default OBLIGATORY + One of ``obligatory``, ``recommended``, ``suggested``, or ``fix``. Specifies the + minimum importance which parent properties need to have to be inherited by this + entity. If no `inheritance` is given, no properties will be inherited by the child. + This parameter is case-insensitive. 
        See Also
        --------
        Entity.add_parent

        """

        return super().add_parent(parent=parent, id=id, name=name, inheritance=inheritance)

    def __init__(self, name=None, id=None, description=None, datatype=None):  # @ReservedAssignment
        Entity.__init__(self, name=name, id=id, description=description,
                        datatype=datatype, role="RecordType")

    def to_xml(self, xml=None, add_properties=ALL):
        if xml is None:
            xml = etree.Element("RecordType")

        return Entity.to_xml(self, xml, add_properties)


class Record(Entity):

    """This class represents LinkAhead's Record entities."""

    def add_property(self, property=None, value=None, id=None, name=None, description=None, datatype=None,
                     unit=None, importance=FIX, inheritance=FIX):  # @ReservedAssignment
        """See ``Entity.add_property``."""

        return super().add_property(
            property=property, id=id, name=name, description=description, datatype=datatype,
            value=value, unit=unit, importance=importance, inheritance=inheritance)

    def __init__(self, name=None, id=None, description=None):  # @ReservedAssignment
        Entity.__init__(self, name=name, id=id, description=description,
                        role="Record")

    def to_xml(self, xml=None, add_properties=ALL):
        if xml is None:
            xml = etree.Element("Record")

        # NOTE(review): `ALL` is passed instead of the `add_properties`
        # argument, so the parameter is silently ignored — looks like a bug;
        # confirm intended behavior before changing.
        return Entity.to_xml(self, xml, add_properties=ALL)


class File(Record):

    """This class represents LinkAhead's file entities.

    For inserting a new file to the server, `path` gives the new location, and
    (exactly?) one of `file` and `pickup` should (must?) be given to specify the
    source of the file.

    Symlinking from the "extroot" file system is not supported by this API yet,
    it can be done manually using the `InsertFilesInDir` flag. For sample code,
    look at `test_files.py` in the Python integration tests of the
    `load_files.py` script in the advanced user tools.

    @param name: A name for this file record (That's an entity name - not to be
        confused with the last segment of the files path).
    @param id: An ID.
    @param description: A description for this file record.
    @param path: The complete path, including the file name, of the file in the
        server's "caosroot" file system.
    @param file: A local path or python file object. The file designated by
        this argument will be uploaded to the server via HTTP.
    @param pickup: A file/folder in the DropOffBox (the server will move that
        file into its "caosroot" file system).
    @param thumbnail: (Local) filename to a thumbnail for this file.
    @param properties: A list of properties for this file record. @todo is this
        implemented?
    @param from_location: Deprecated, use `pickup` instead.

    """

    def __init__(self, name=None, id=None, description=None,  # @ReservedAssignment
                 path=None, file=None, pickup=None,  # @ReservedAssignment
                 thumbnail=None, from_location=None):
        Record.__init__(self, id=id, name=name, description=description)
        self.role = "File"
        self.datatype = None

        # location in the fileserver
        self.path = path

        # local file path or pointer to local file
        self.file = file
        self.thumbnail = thumbnail

        self.pickup = pickup

        if from_location is not None:
            warn(DeprecationWarning(
                "Param `from_location` is deprecated, use `pickup instead`."))

            if self.pickup is None:
                self.pickup = from_location

    def to_xml(self, xml=None, add_properties=ALL, local_serialization=False):
        """Convert this file to an xml element.

        @return: xml element
        """

        if xml is None:
            xml = etree.Element("File")

        return Entity.to_xml(self, xml=xml, add_properties=add_properties,
                             local_serialization=local_serialization)

    def download(self, target=None):
        """Download this file-entity's actual file from the file server. It
        will be stored to the target or will be hold as a temporary file.

        @param target: Where to store this file.
        @return: local path of the downloaded file.
+ """ + self.clear_server_messages() + + if target: + file_ = open(target, 'wb') + else: + file_ = NamedTemporaryFile(mode='wb', delete=False) + checksum = File.download_from_path(file_, self.path) + + if self._checksum is not None and self._checksum.lower() != checksum.hexdigest().lower(): + raise ConsistencyError( + "The downloaded file had an invalid checksum. Maybe the download did not finish?") + + return file_.name + + @staticmethod + def download_from_path(target_file, path): + + _log_request("GET (download): " + path) + response = get_connection().download_file(path) + + data = response.read(8000) + checksum = sha512() + + while data: + target_file.write(data) + checksum.update(data) + data = response.read(8000) + target_file.close() + + return checksum + + @staticmethod + def _get_checksum(files): + import locale + + if hasattr(files, "name"): + return File._get_checksum_single_file(files.name) + else: + if isdir(files): + checksumappend = "" + + for child in sorted(listdir(files), + key=cmp_to_key(locale.strcoll)): + + if isdir(files + '/' + child): + checksumappend += child + checksumappend += File._get_checksum(files + "/" + child) + checksum = sha512() + checksum.update(checksumappend.encode('utf-8')) + + return checksum.hexdigest() + else: + return File._get_checksum_single_file(files) + + @staticmethod + def _get_checksum_single_file(single_file): + _file = open(single_file, 'rb') + data = _file.read(1000) + checksum = sha512() + + while data: + checksum.update(data) + data = _file.read(1000) + _file.close() + + return checksum.hexdigest() + + def add_property(self, property=None, id=None, name=None, description=None, datatype=None, + value=None, unit=None, importance=FIX, inheritance=FIX): # @ReservedAssignment + """See ``Entity.add_property``.""" + + return super().add_property( + property=property, id=id, name=name, description=description, datatype=datatype, + value=value, unit=unit, importance=importance, inheritance=inheritance) + + +class 
class _Properties(list):
    """A list of properties that additionally tracks, per property, an
    importance and an inheritance flag in side dictionaries.

    Lookup indices by name and by id are maintained by ``append``.
    NOTE(review): the side dictionaries are keyed by the property *object*
    itself, so importance/inheritance lookups require the same object that
    was appended.
    """

    def __init__(self):
        list.__init__(self)
        # side tables keyed by the property object appended to this list
        self._importance = dict()
        self._inheritance = dict()
        # lookup indices, filled by append()
        self._element_by_name = dict()
        self._element_by_id = dict()

    def get_importance(self, property):  # @ReservedAssignment
        """Return the stored importance for `property`.

        `property` may be the entity object itself or a name (anything with an
        `encode` attribute, i.e. a string).  Returns None implicitly when
        `property` is None, and None when no importance was stored.
        """
        if property is not None:
            if hasattr(property, "encode"):
                # looks like a string -> resolve it to the entity object first
                property = self.get_by_name(property)  # @ReservedAssignment

            return self._importance.get(property)

    def set_importance(self, property, importance):  # @ReservedAssignment
        """Store `importance` for `property`; silently ignored if `property` is None."""
        if property is not None:
            self._importance[property] = importance

    def get_by_name(self, name):
        """Get a property of this list via its name.

        Raises a KeyError if no property with this name is known to this list.
        If several properties share the name, the one appended last wins
        (the name index is overwritten on append).

        @param name: the name of the property to be returned.
        @return: A property
        """

        return self._element_by_name[name]

    def extend(self, parents):
        # append() accepts lists as well as single entities, so this
        # effectively extends element-wise.
        self.append(parents)

        return self

    def append(self, property, importance=None, inheritance=None):  # @ReservedAssignment
        """Append a property (or a list of properties) and record its
        importance and inheritance flag.

        The inheritance flag defaults to FIX (module-level constant defined
        elsewhere in this file).  Raises TypeError for non-Entity arguments.
        """
        if isinstance(property, list):
            for p in property:
                self.append(p, importance, inheritance)

            return

        if isinstance(property, Entity):
            if importance is not None:
                self._importance[property] = importance

            if inheritance is not None:
                self._inheritance[property] = inheritance
            else:
                self._inheritance[property] = FIX

            if property.id is not None:
                self._element_by_id[str(property.id)] = property

            if property.name is not None:
                self._element_by_name[property.name] = property
            list.append(self, property)
        else:
            raise TypeError("Argument was not an entity")

        return self

    def to_xml(self, add_to_element, add_properties):
        """Append one <Property> child per element to `add_to_element`.

        When `add_properties` is FIX, only properties whose stored importance
        equals FIX are serialized.  Importance and inheritance flag are
        emitted as XML attributes when present in the side tables.
        """
        for p in self:
            importance = self._importance.get(p)

            if add_properties == FIX and not importance == FIX:
                continue

            pelem = p.to_xml(xml=etree.Element("Property"), add_properties=FIX)

            if p in self._importance:
                pelem.set("importance", importance)

            if p in self._inheritance:
                pelem.set("flag", "inheritance:" +
                          str(self._inheritance.get(p)))
            add_to_element.append(pelem)

        return self

    def __repr__(self):
        xml = etree.Element("PropertyList")
        self.to_xml(xml, add_properties=FIX)

        return xml2str(xml)

    def _get_entity_by_cuid(self, cuid):
        '''
        Get the first entity which has the given cuid.
        Note: this method is intended for internal use.
        @param cuid: The cuid of the entity to be returned.
        @return: Entity with the given cuid.
        '''

        for e in self:
            if e._cuid is not None:
                if str(e._cuid) == str(cuid):
                    return e
        raise KeyError("No entity with that cuid in this container.")

    def remove(self, prop):
        """Remove a property, identified by the entity object itself, by its
        id, or by its name.  Raises KeyError if nothing was removed.
        """
        if isinstance(prop, Entity):
            if prop in self:
                list.remove(self, prop)

                return
            else:
                if prop.id is not None:
                    # by id

                    for e in self:
                        if e.id is not None and e.id == prop.id:
                            list.remove(self, e)

                            return

                if prop.name is not None:
                    # by name

                    for e in self:
                        if e.name is not None and e.name == prop.name:
                            list.remove(self, e)

                            return
        elif hasattr(prop, "encode"):
            # by name

            for e in self:
                if e.name is not None and str(e.name) == str(prop):
                    list.remove(self, e)

                    return
        elif isinstance(prop, int):
            # by id

            for e in self:
                if e.id is not None and e.id == prop:
                    list.remove(self, e)

                    return
        raise KeyError(str(prop) + " not found.")
+ ''' + + for e in self: + if e._cuid is not None: + if str(e._cuid) == str(cuid): + return e + raise KeyError("No entity with that cuid in this container.") + + def __init__(self): + list.__init__(self) + self._element_by_name = dict() + self._element_by_id = dict() + + def extend(self, parents): + self.append(parents) + + return self + + def append(self, parent): # @ReservedAssignment + if isinstance(parent, list): + for p in parent: + self.append(p) + + return + + if isinstance(parent, Entity): + if parent.id: + self._element_by_id[str(parent.id)] = parent + + if parent.name: + self._element_by_name[parent.name] = parent + list.append(self, parent) + else: + raise TypeError("Argument was not an Entity") + + return self + + def to_xml(self, add_to_element): + for p in self: + pelem = etree.Element("Parent") + + if p.id is not None: + pelem.set("id", str(p.id)) + + if p._cuid is not None: + pelem.set("cuid", str(p._cuid)) + + if p.name is not None: + pelem.set("name", str(p.name)) + + if p.description is not None: + pelem.set("description", str(p.description)) + + if len(p._flags) > 0: + flagattr = "" + + for key in p._flags.keys(): + flag = p._flags[key] + + if flag is not None and flag != "": + flagattr += str(key) + ":" + str(flag) + "," + else: + flagattr += str(key) + "," + pelem.set("flag", flagattr) + add_to_element.append(pelem) + + def __repr__(self): + xml = etree.Element("ParentList") + self.to_xml(xml) + + return xml2str(xml) + + def remove(self, parent): + if isinstance(parent, Entity): + if parent in self: + list.remove(self, parent) + else: + if parent.id is not None: + # by id + + for e in self: + if e.id is not None and e.id == parent.id: + list.remove(self, e) + + return + + if parent.name is not None: + # by name + + for e in self: + if e.name is not None and e.name == parent.name: + list.remove(self, e) + + return + elif hasattr(parent, "encode"): + # by name + + for e in self: + if e.name is not None and e.name == parent: + list.remove(self, 
e) + + return + elif isinstance(parent, int): + # by id + + for e in self: + if e.id is not None and e.id == parent: + list.remove(self, e) + + return + raise KeyError(str(parent) + " not found.") + + +class Messages(list): + """This specialization of list stores error, warning, info, and other + messages. The mentioned three messages types play a special role. + They are generated by the client and the server while processing the entity + to which the message in question belongs. It is RECOMMENDED NOT to specify + such messages manually. The other messages are ignored by the server unless + there is a plug-in which interprets them. + + Any message MUST have a type. It MAY have a code (an integer), a description (short string), + or a body (longer string): + + <$Type code=$code description=$description>$body</$Type> + + Error, warning, and info messages will be deleted before any transaction. + + Examples: + <<< msgs = Messages() + + <<< # create Message + <<< msg = Message(type="HelloWorld", code=1, description="Greeting the world", body="Hello, world!") + + <<< # append it to the Messages + <<< msgs.append(msg) + + <<< # use Messages as list of Message objects + <<< for m in msgs: + ... assert isinstance(m,Message) + + <<< # remove it + <<< msgs.remove(msg) + + <<< # ok append it again ... + <<< msgs.append(msg) + <<< # get it back via get(...) and the key tuple (type, code) + <<< assert id(msgs.get("HelloWorld",1))==id(msg) + """ + + def clear_server_messages(self): + """Removes all messages of type error, warning and info. 
class Messages(list):
    """This specialization of list stores error, warning, info, and other
    messages. The mentioned three message types play a special role.
    They are generated by the client and the server while processing the entity
    to which the message in question belongs. It is RECOMMENDED NOT to specify
    such messages manually. The other messages are ignored by the server unless
    there is a plug-in which interprets them.

    Any message MUST have a type. It MAY have a code (an integer), a description (short string),
    or a body (longer string):

    <$Type code=$code description=$description>$body</$Type>

    Error, warning, and info messages will be deleted before any transaction.

    Examples:
    <<< msgs = Messages()

    <<< # create Message
    <<< msg = Message(type="HelloWorld", code=1, description="Greeting the world", body="Hello, world!")

    <<< # append it to the Messages
    <<< msgs.append(msg)

    <<< # use Messages as list of Message objects
    <<< for m in msgs:
    ...     assert isinstance(m,Message)

    <<< # remove it
    <<< msgs.remove(msg)

    <<< # ok append it again ...
    <<< msgs.append(msg)
    <<< # get it back via get(...) and the key tuple (type, code)
    <<< assert id(msgs.get("HelloWorld",1))==id(msg)
    """

    def clear_server_messages(self):
        """Removes all messages of type error, warning and info. All other
        messages types are custom types which should be handled by custom
        code."""
        rem = []

        for m in self:
            if m.type.lower() in ["error", "warning", "info"]:
                rem.append(m)

        for m in rem:
            self.remove(m)

    #######################################################################
    # can be removed after 01.07.24
    # default implementation of list is sufficient
    def __setitem__(self, key, value):  # @ReservedAssignment
        """Deprecated fuzzy setter: key may be an int (plain list behavior),
        a (type, code) tuple, or a bare type; value may be a Message, a
        (description, body) tuple, or a bare body."""
        if not isinstance(value, Message):
            warn("__setitem__ will in future only accept Message objects as second argument. "
                 "You will no longe be"
                 " able to pass bodys such that Message object is created on the fly",
                 DeprecationWarning)
        if not isinstance(key, int):
            warn("__setitem__ will in future only accept int as first argument",
                 DeprecationWarning)
        if isinstance(key, tuple):
            if len(key) == 2:
                type = key[0]  # @ReservedAssignment
                code = key[1]
            elif len(key) == 1:
                type = key[0]  # @ReservedAssignment
                code = None
            else:
                raise TypeError(
                    "('type', 'code'), ('type'), or 'type' expected.")
        elif isinstance(key, Messages._msg_key):
            type = key._type  # @ReservedAssignment
            code = key._code
        else:
            type = key  # @ReservedAssignment
            code = None

        if isinstance(value, tuple):
            if len(value) == 2:
                description = value[0]
                body = value[1]
            elif len(value) == 1:
                body = value[0]
                description = None
            else:
                raise TypeError(
                    "('description', 'body'), ('body'), or 'body' expected.")

        if isinstance(value, Message):
            body = value.body
            description = value.description
        elif not isinstance(value, tuple):
            # FIX: this branch used to run for tuples as well, clobbering the
            # description/body unpacked above; and the Message branch used to
            # do `m = Message` (storing the class object itself instead of an
            # instance).  A Message instance is now always constructed.
            body = value
            description = None
        m = Message(type=type, code=code, description=description, body=body)
        if isinstance(key, int):
            super().__setitem__(key, m)
        else:
            self.append(m)

    def __getitem__(self, key):
        """Deprecated fuzzy getter: int keys behave like plain list indexing;
        (type, code) tuples or bare types are looked up via get()."""
        if not isinstance(key, int):
            warn("__getitem__ only supports integer keys in future.", DeprecationWarning)
        if isinstance(key, tuple):
            if len(key) == 2:
                type = key[0]  # @ReservedAssignment
                code = key[1]
            elif len(key) == 1:
                type = key[0]  # @ReservedAssignment
                code = None
            else:
                raise TypeError(
                    "('type', 'code'), ('type'), or 'type' expected.")
        elif isinstance(key, int):
            # FIX: negative indices used to fall through to the fuzzy get()
            # path and raise KeyError; plain list indexing (which the comment
            # above declares sufficient) handles them correctly.
            return super().__getitem__(key)
        else:
            type = key  # @ReservedAssignment
            code = None
        m = self.get(type, code)
        if m is None:
            raise KeyError()
        if m.description:
            return (m.description, m.body)
        else:
            return m.body

    def __delitem__(self, key):
        if isinstance(key, tuple):
            warn("__delitem__ only supports integer keys in future.", DeprecationWarning)
            if self.get(key[0], key[1]) is not None:
                self.remove(self.get(key[0], key[1]))
        else:
            super().__delitem__(key)

    def remove(self, obj, obj2=None):
        if obj2 is not None:
            warn("Supplying a second argument to remove is deprecated.",
                 DeprecationWarning)
            super().remove(self.get(obj, obj2))
        else:
            super().remove(obj)

    def append(self, msg):
        if isinstance(msg, Messages) or isinstance(msg, list):
            warn("Supplying a list-like object to append is deprecated. Please use extend"
                 " instead.", DeprecationWarning)
            for m in msg:
                self.append(m)
            return

        super().append(msg)

    @staticmethod
    def _hash(t, c):
        # case-insensitive (type, code) fingerprint used by the fuzzy get()
        return hash(str(t).lower() + (str(",") + str(c) if c is not None else ''))
    # end remove
    #######################################################################

    def get(self, type, code=None, default=None, exact=False):  # @ReservedAssignment
        """
        returns a message from the list that kind of matches type and code

        case and types (str/int) are ignored

        If no suitable message is found, the default argument is returned
        If exact=True, the message has to match code and type exactly
        """
        if not exact:
            warn("The fuzzy mode (exact=False) is deprecated. Please use exact in future.",
                 DeprecationWarning)

        for msg in self:
            if exact:
                if msg.type == type and msg.code == code:
                    return msg
            else:
                if self._hash(msg.type, msg.code) == self._hash(type, code):
                    return msg

        return default

    def to_xml(self, add_to_element):
        """Append each message's XML element to `add_to_element`."""
        for m in self:
            melem = m.to_xml()
            add_to_element.append(melem)

    def __repr__(self):
        xml = etree.Element("Messages")
        self.to_xml(xml)

        return xml2str(xml)

    #######################################################################
    # can be removed after 01.07.24
    class _msg_key:
        # Deprecated helper key wrapping (type, code) with case-insensitive
        # equality/hashing.

        def __init__(self, type, code):  # @ReservedAssignment
            warn("This class is deprecated.", DeprecationWarning)
            self._type = type
            self._code = code

        @staticmethod
        def get(msg):
            return Messages._msg_key(msg.type, msg.code)

        def __eq__(self, obj):
            return self.__hash__() == obj.__hash__()

        def __hash__(self):
            return hash(str(self._type).lower() + (str(",") + str(self._code)
                                                   if self._code is not None else ''))

        def __repr__(self):
            return str(self._type) + (str(",") + str(self._code)
                                      if self._code is not None else '')
    # end remove
    #######################################################################


class _Messages(Messages):
    # Deprecated alias kept for backwards compatibility.
    def __init__(self, *args, **kwargs):
        warn("_Messages is deprecated. "
             "Use class Messages instead and beware of the slightly different API of the new"
             " Messages class", DeprecationWarning)
        super().__init__(*args, **kwargs)
+ ''' + if e_local is None or e_remote is None: + return None + if e_local.role is None: + e_local.role = e_remote.role + elif e_remote.role is not None and not e_local.role.lower() == e_remote.role.lower(): + raise ValueError("The resulting entity had a different role ({0}) " + "than the local one ({1}). This probably means, that " + "the entity was intialized with a wrong class " + "by this client or it has changed in the past and " + "this client did't know about it yet.".format( + e_remote.role, e_local.role)) + + e_local.id = e_remote.id + e_local.name = e_remote.name + e_local.description = e_remote.description + e_local.path = e_remote.path + e_local._checksum = e_remote._checksum + e_local._size = e_remote._size + e_local.datatype = e_remote.datatype + e_local.unit = e_remote.unit + e_local.value = e_remote.value + e_local.properties = e_remote.properties + e_local.parents = e_remote.parents + e_local.messages = e_remote.messages + e_local.acl = e_remote.acl + e_local.permissions = e_remote.permissions + e_local.is_valid = e_remote.is_valid + e_local.is_deleted = e_remote.is_deleted + e_local.version = e_remote.version + e_local.state = e_remote.state + + if hasattr(e_remote, "query"): + e_local.query = e_remote.query + + if hasattr(e_remote, "affiliation"): + e_local.affiliation = e_remote.affiliation + + return e_local + + +def _deletion_sync(e_local, e_remote): + if e_local is None or e_remote is None: + return + + try: + e_remote.get_messages()["info", 10] # try and get the deletion info + except KeyError: + # deletion info wasn't there + e_local.messages = e_remote.messages + + return + + _basic_sync(e_local, e_remote) + e_local.is_valid = lambda: False + e_local.is_deleted = lambda: True + e_local.id = None + + +class Container(list): + """Container is a type safe list for Entities. 
+ + It also provides several short-cuts for transactions like retrieval, + insertion, update, and deletion which are a applied to all entities + in the container or the whole container respectively. + """ + + _debug = staticmethod( + lambda: ( + get_config().getint( + "Container", + "debug") if get_config().has_section("Container") and + get_config().get( + "Container", + "debug") is not None else 0)) + + def is_valid(self): + for e in self: + if not e.is_valid(): + return False + + return True + + def __hash__(self): + return object.__hash__(self) + + def remove(self, entity): + """Remove the first entity from this container which is equal to the + given entity. Raise a ValueError if there is no such entity. + + Alternatively, if the argument is not an entity but an ID, the + contained entity with this ID is removed. + + @param entity: The entity to be removed. + """ + + if entity in self: + super().remove(entity) + else: + for ee in self: + if entity == ee.id: + super().remove(ee) + + return ee + raise ValueError( + "Container.remove(entity): entity not in Container") + + return entity + + def _get_entity_by_cuid(self, cuid): + ''' + Get the first entity which has the given cuid. + Note: this method is intended for internal use. + @param name: The cuid of the entity to be returned. + @return: Entity with the given cuid. + ''' + + for e in self: + if e._cuid is not None: + if str(e._cuid) == str(cuid): + return e + raise KeyError("No entity with such cuid (" + str(cuid) + ")!") + + def get_entity_by_id(self, id): # @ReservedAssignment + """Get the first entity which has the given id. Note: If several + entities are in this list which have the same id, this method will only + return the first and ignore the others. + + @param name: The id of the entity to be returned. + @return: Entity with the given id. 
+ """ + + for e in self: + if e.id: + if e.id == int(id): + return e + raise KeyError("No entity with such id (" + str(id) + ")!") + + def get_all_errors(self): + """Returns a dictionary with all errors from all entities in this + container. + + The dictionary keys are the ids of those entities having + contained an error. + """ + error_list = dict() + + for e in self: + if isinstance(e, Entity): + el = e.get_errors_deep() + + if len(el) > 0: + error_list[str(e.id)] = el + + return error_list + + def get_entity_by_name(self, name, case_sensitive=True): + """Get the first entity which has the given name. Note: If several + entities are in this list which have the same name, this method will + only return the first and ignore the others. + + @param name: the name of the entity to be returned. + @param case_sensitive (True/False): Do a case-sensitive search for name (or not). + @return: Entity with the given name. + """ + + for e in self: + if e.name is not None: + if case_sensitive and e.name == str(name): + return e + elif not case_sensitive and e.name.lower() == str(name).lower(): + return e + raise KeyError("No entity with such name (" + str(name) + ")!") + + def __init__(self): + """Container is a list of entities which can be + inserted/updated/deleted/retrieved at once.""" + list.__init__(self) + self._timestamp = None + self._srid = None + self.messages = Messages() + + def extend(self, entities): + """Extend this Container by appending all single entities in the given + list of entities. + + @param entities: list of entities. 
+ """ + + if isinstance(entities, Container): + for entity in entities: + self.append(entity) + elif isinstance(entities, (list, set)): + for entity in entities: + self.extend(entity) + elif isinstance(entities, Entity): + self.append(entities) + elif isinstance(entities, int): + self.append(entities) + elif hasattr(entities, "encode"): + self.append(entities) + else: + raise TypeError( + "Expected a list or a container (was " + str(type(entities)) + ").") + + return self + + def append(self, entity): + """Append an entity container. + + If the parameter is an integer an entity with the corresponding ID is appended. + If the parameter is a string an entity with the corresponding name is appended. + Raise a TypeError if the entity is not a sub type of the correct class (as defined + via the constructor). + + @param entity: The entity to be appended. + """ + + if isinstance(entity, Entity): + super().append(entity) + elif isinstance(entity, int): + super().append(Entity(id=entity)) + elif hasattr(entity, "encode"): + super().append(Entity(name=entity)) + elif isinstance(entity, QueryTemplate): + super().append(entity) + else: + warn("Entity was neither an id nor a name nor an entity." + + " (was " + str(type(entity)) + ":\n" + str(entity) + ")") + # raise TypeError( + # "Entity was neither an id nor a name nor an entity." + + # " (was " + str(type(entity)) + "\n" + str(entity) + ")") + + return self + + def to_xml(self, add_to_element=None, local_serialization=False): + """Get an xml tree representing this Container or append all entities + to the given xml element. + + @param add_to_element=None: optional element to which all entities of this container is to be appended. + @return xml element + """ + tmpid = 0 + + # users might already have specified some tmpids. -> look for smallest. 
+ + for e in self: + tmpid = min(tmpid, Container._get_smallest_tmpid(e)) + tmpid -= 1 + + if add_to_element is None: + add_to_element = etree.Element("Entities") + + for m in self.messages: + add_to_element.append(m.to_xml()) + + for e in self: + if e.id is None: + e.id = tmpid + tmpid -= 1 + + for e in self: + if isinstance(e, File): + elem = e.to_xml(local_serialization=local_serialization) + else: + elem = e.to_xml() + add_to_element.append(elem) + + return add_to_element + + def get_errors(self): + """Get all error messages of this container. + + @return Messages: Error messages. + """ + + if self.has_errors(): + ret = Messages() + + for m in self.messages: + if m.type.lower() == "error": + ret.append(m) + + return ret + else: + return None + + def get_warnings(self): + """Get all warning messages of this container. + + @return Messages: Warning messages. + """ + + if self.has_warnings(): + ret = Messages() + + for m in self.messages: + if m.type.lower() == "warning": + ret.append(m) + + return ret + else: + return None + + def get_all_messages(self): + ret = Messages() + + for e in self: + ret.extend(e.get_all_messages()) + + return ret + + def add_message(self, m): + self.messages.append(m) + + return self + + def has_warnings(self): + ''' + @return True: if and only if this container has any warning messages. + ''' + + for m in self.messages: + if m.type.lower() == "warning": + return True + + return False + + def has_errors(self): + ''' + @return True: if and only if this container has any error messages. + ''' + + for m in self.messages: + if m.type.lower() == "error": + return True + + return False + + def __str__(self): + return self.__repr__() + + def __repr__(self): + return xml2str(self.to_xml()) + + @staticmethod + def from_xml(xml_str): + """Creates a Container from the given xml string. + + @return The created Container. 
+ """ + + c = Container() + xml = etree.fromstring(xml_str) + + for element in xml: + e = _parse_single_xml_element(element) + c.append(e) + + return c + + @staticmethod + def _response_to_entities(http_response): + """Parse the response of a Http-request. + + Note: Method is intended for the internal use. + """ + body = http_response.read() + _log_response(body) + + xml = etree.fromstring(body) + + if xml.tag.lower() == "response": + c = Container() + + for child in xml: + e = _parse_single_xml_element(child) + + if isinstance(e, Message): + c.messages.append(e) + elif isinstance(e, Query): + c.query = e + + if e.messages is not None: + c.messages.extend(e.messages) + elif isinstance(e, (Entity, QueryTemplate)): + e.is_deleted = lambda: False + + if e.has_errors() is True: + e.is_valid = lambda: False + elif e.id is None or e.id < 0: + e.is_valid = lambda: False + else: + e.is_valid = lambda: True + c.append(e) + else: + # ignore + pass + c._timestamp = xml.get("timestamp") + c._srid = xml.get("srid") + + return c + else: + raise LinkAheadException( + "The server's response didn't contain the expected elements. The configuration of this client might be invalid (especially the url).") + + def _sync(self, container, unique, raise_exception_on_error, + name_case_sensitive=False, strategy=_basic_sync): + """Synchronize this container (C1) with another container (C2). + + That is: 1) Synchronize any entity e1 in C1 with the + corresponding entity e2 from C2 via e1._sync(c2). 2) Add any + leftover entity from C2 to C1. + """ + # TODO: This method is extremely slow. E.g. 30 seconds for 1000 + # entities. 
+ + sync_dict = self._calc_sync_dict( + remote_container=container, + unique=unique, + raise_exception_on_error=raise_exception_on_error, + name_case_sensitive=name_case_sensitive) + + # sync every entity in this container + + for entity in self: + try: + e_sync = sync_dict[entity] + + if e_sync is not None: + strategy(entity, e_sync.pop()) + + for e in e_sync: + self.append(e) + except KeyError: + pass + + # add leftover entities + try: + if sync_dict[self] is not None: + for e in sync_dict[self]: + self.append(e) + except KeyError: + pass + + # messages: + + for m in container.messages: + self.add_message(m) + + self._timestamp = container._timestamp + self._srid = container._srid + + def _calc_sync_dict(self, remote_container, unique, + raise_exception_on_error, name_case_sensitive): + # self is local, remote_container is remote. + + # which is to be synced with which: + # sync_dict[local_entity]=sync_remote_enities + sync_dict = dict() + + # list of remote entities which already have a local equivalent + used_remote_entities = [] + + # match by cuid + + for local_entity in self: + + sync_dict[local_entity] = None + + if local_entity._cuid is not None: + # a list of remote entities which are equivalents of + # local_entity + sync_remote_entities = [] + + for remote_entity in remote_container: + if remote_entity._cuid is not None and str(remote_entity._cuid) == str( + local_entity._cuid) and remote_entity not in used_remote_entities: + sync_remote_entities.append(remote_entity) + used_remote_entities.append(remote_entity) + + if len(sync_remote_entities) > 0: + sync_dict[local_entity] = sync_remote_entities + + if unique and len(sync_remote_entities) > 1: + msg = "Request was not unique. CUID " + \ + str(local_entity._cuid) + " was found " + \ + str(len(sync_remote_entities)) + " times." 
+ local_entity.add_message(Message(description=msg, type="Error")) + + if raise_exception_on_error: + raise MismatchingEntitiesError(msg) + + # match by id + + for local_entity in self: + if sync_dict[local_entity] is None and local_entity.id is not None: + sync_remote_entities = [] + + for remote_entity in remote_container: + if (remote_entity.id is not None + and remote_entity.id == local_entity.id + and remote_entity not in used_remote_entities): + sync_remote_entities.append(remote_entity) + used_remote_entities.append(remote_entity) + + if len(sync_remote_entities) > 0: + sync_dict[local_entity] = sync_remote_entities + + if unique and len(sync_remote_entities) > 1: + msg = "Request was not unique. ID " + \ + str(local_entity.id) + " was found " + \ + str(len(sync_remote_entities)) + " times." + local_entity.add_message(Message(description=msg, type="Error")) + + if raise_exception_on_error: + raise MismatchingEntitiesError(msg) + + # match by path + + for local_entity in self: + if (sync_dict[local_entity] is None + and local_entity.path is not None): + sync_remote_entities = [] + + for remote_entity in remote_container: + if (remote_entity.path is not None + and str(remote_entity.path) == ( + local_entity.path + + if local_entity.path.startswith("/") else "/" + + local_entity.path) + and remote_entity not in used_remote_entities): + sync_remote_entities.append(remote_entity) + used_remote_entities.append(remote_entity) + + if len(sync_remote_entities) > 0: + sync_dict[local_entity] = sync_remote_entities + + if unique and len(sync_remote_entities) > 1: + msg = "Request was not unique. Path " + \ + str(local_entity.path) + " was found " + \ + str(len(sync_remote_entities)) + " times." 
+ local_entity.add_message(Message(description=msg, type="Error")) + + if raise_exception_on_error: + raise MismatchingEntitiesError(msg) + + # match by name + + for local_entity in self: + if (sync_dict[local_entity] is None + and local_entity.name is not None): + sync_remote_entities = [] + + for remote_entity in remote_container: + if (remote_entity.name is not None + and (str(remote_entity.name) == str(local_entity.name) + or + (name_case_sensitive is False and + str(remote_entity.name).lower() == str( + local_entity.name).lower())) + and remote_entity not in used_remote_entities): + sync_remote_entities.append(remote_entity) + used_remote_entities.append(remote_entity) + + if len(sync_remote_entities) > 0: + sync_dict[local_entity] = sync_remote_entities + + if unique and len(sync_remote_entities) > 1: + msg = "Request was not unique. Name " + \ + str(local_entity.name) + " was found " + \ + str(len(sync_remote_entities)) + " times." + local_entity.add_message(Message(description=msg, type="Error")) + + if raise_exception_on_error: + raise MismatchingEntitiesError(msg) + + # add remaining entities to this remote_container + sync_remote_entities = [] + + for remote_entity in remote_container: + if not (remote_entity in used_remote_entities): + sync_remote_entities.append(remote_entity) + + if len(sync_remote_entities) > 0: + sync_dict[self] = sync_remote_entities + + if unique and len(sync_remote_entities) != 0: + msg = "Request was not unique. There are " + \ + str(len(sync_remote_entities)) + \ + " entities which could not be matched to one of the requested ones." + remote_container.add_message(Message(description=msg, type="Error")) + + if raise_exception_on_error: + raise MismatchingEntitiesError(msg) + + return sync_dict + + def _test_dependencies_in_container(self, container): + """This function returns those elements of a given container that are a dependency of another element of the same container. 
+ + Args: + container (Container): a linkahead container + + Returns: + [set]: a set of unique elements that are a dependency of another element of `container` + """ + item_id = set() + is_parent = set() + is_property = set() + is_being_referenced = set() + dependent_parents = set() + dependent_properties = set() + dependent_references = set() + dependencies = set() + + for container_item in container: + item_id.add(container_item.id) + + for parents in container_item.get_parents(): + is_parent.add(parents.id) + + for references in container_item.get_properties(): + if is_reference(references.datatype): + # add only if it is a reference, not a property + + if references.value is None: + continue + elif isinstance(references.value, int): + is_being_referenced.add(references.value) + elif is_list_datatype(references.datatype): + for list_item in references.value: + if isinstance(list_item, int): + is_being_referenced.add(list_item) + else: + is_being_referenced.add(list_item.id) + else: + try: + is_being_referenced.add(references.value.id) + except AttributeError: + pass + + if hasattr(references, 'id'): + is_property.add(references.id) + + dependent_parents = item_id.intersection(is_parent) + dependent_properties = item_id.intersection(is_property) + dependent_references = item_id.intersection(is_being_referenced) + dependencies = dependent_parents.union(dependent_references) + dependencies = dependencies.union(dependent_properties) + + return dependencies + + def delete(self, raise_exception_on_error=True, flags=None, chunk_size=100): + """Delete all entities in this container. + + Entities are identified via their id if present and via their + name otherwise. If any entity has no id and no name a + TransactionError will be raised. + + Note: If only a name is given this could lead to ambiguities. If + this happens, none of them will be deleted. It occurs an error + instead. 
+ """ + item_count = len(self) + # Split Container in 'chunk_size'-sized containers (if necessary) to avoid error 414 Request-URI Too Long + + if item_count > chunk_size: + dependencies = self._test_dependencies_in_container(self) + ''' + If there are as many dependencies as entities in the container and it is larger than chunk_size it cannot be split and deleted. + This case cannot be handled at the moment. + ''' + + if len(dependencies) == item_count: + if raise_exception_on_error: + te = TransactionError( + msg="The container is too large and with too many dependencies within to be deleted.", + container=self) + raise te + + return self + + # items which have to be deleted later because of dependencies. + dependencies_delete = Container() + + for i in range(0, int(item_count/chunk_size)+1): + chunk = Container() + + for j in range(i*chunk_size, min(item_count, (i+1)*chunk_size)): + if len(dependencies): + if self[j].id in dependencies: + dependencies_delete.append(self[j]) + else: + chunk.append(self[j]) + else: + chunk.append(self[j]) + + if len(chunk): + chunk.delete() + if len(dependencies_delete): + dependencies_delete.delete() + + return self + + if len(self) == 0: + if raise_exception_on_error: + te = TransactionError( + msg="There are no entities to be deleted. This container is empty.", + container=self) + raise te + + return self + self.clear_server_messages() + + c = get_connection() + id_str = [] + + for entity in self: + if entity.is_deleted(): + continue + entity._cuid = None + + if entity.id is not None: + id_str.append(str(entity.id)) + elif entity.name is not None: + id_str.append(str(entity.name)) + else: + entity.add_message( + Message( + type="Error", + description="This entity has no identifier. It cannot be deleted.")) + + if raise_exception_on_error: + ee = EntityError( + "This entity has no identifier. 
It cannot be deleted.", entity) + raise TransactionError(ee) + entity.is_valid = lambda: False + + if len(id_str) == 0: + if raise_exception_on_error: + te = TransactionError( + msg="There are no entities to be deleted.", + container=self) + raise te + + return self + entity_url_segments = [_ENTITY_URI_SEGMENT, "&".join(id_str)] + + _log_request("DELETE: " + str(entity_url_segments) + + ("?" + str(flags) if flags is not None else '')) + + http_response = c.delete(entity_url_segments, query_dict=flags) + cresp = Container._response_to_entities(http_response) + self._sync(cresp, raise_exception_on_error=raise_exception_on_error, + unique=True, strategy=_deletion_sync) + + if raise_exception_on_error: + raise_errors(self) + + return self + + def retrieve(self, query=None, unique=True, + raise_exception_on_error=True, sync=True, flags=None): + """Retrieve all entities in this container identified via their id if + present and via their name otherwise. Any locally already existing + attributes (name, description, ...) will be preserved. Any such + properties and parents will be synchronized as well. They will not be + overridden. This method returns a Container containing the this entity. + + If any entity has no id and no name a LinkAheadException will be raised. + + Note: If only a name is given this could lead to ambiguities. All entities with the name in question + will be returned. Therefore, the container could contain more elements after the retrieval than + before. + """ + + if isinstance(query, list): + self.extend(query) + query = None + cresp = Container() + entities_str = [] + + if query is None: + for entity in self: + if entity.id is not None and entity.id < 0: + entity.id = None + entity.clear_server_messages() + + if entity.id is not None: + entities_str.append(str(entity.id)) + elif entity.name is not None: + entities_str.append(str(entity.name)) + elif entity.path is not None: + # fetch by path (files only) + cresp.extend(execute_query( + "FIND FILE . 
STORED AT \"" + str(entity.path) + "\"", unique=False)) + else: + entity.add_message( + Message( + type="Error", + description="This entity has no identifier. It cannot be retrieved.")) + + if raise_exception_on_error: + ee = EntityError( + "This entity has no identifier. It cannot be retrieved.", + entity) + raise TransactionError(ee) + entity.is_valid = lambda: False + else: + entities_str.append(str(query)) + + self.clear_server_messages() + cresp2 = self._retrieve(entities=entities_str, flags=flags) + cresp.extend(cresp2) + cresp.messages.extend(cresp2.messages) + + if raise_exception_on_error: + raise_errors(cresp) + + if sync: + self._sync(cresp, unique=unique, + raise_exception_on_error=raise_exception_on_error) + + return self + else: + return cresp + + @staticmethod + def _split_uri_string(entities): + + # get half length of entities_str + hl = len(entities) // 2 + + # split in two uris + + return (entities[0:hl], entities[hl:len(entities)]) + + def _retrieve(self, entities, flags): + c = get_connection() + try: + _log_request("GET: " + _ENTITY_URI_SEGMENT + str(entities) + + ('' if flags is None else "?" 
+ str(flags))) + http_response = c.retrieve( + entity_uri_segments=[ + _ENTITY_URI_SEGMENT, str( + "&".join(entities))], query_dict=flags) + + return Container._response_to_entities(http_response) + except HTTPURITooLongError as uri_e: + try: + # split up + uri1, uri2 = Container._split_uri_string(entities) + except ValueError as val_e: + raise uri_e from val_e + c1 = self._retrieve(entities=uri1, flags=flags) + c2 = self._retrieve(entities=uri2, flags=flags) + c1.extend(c2) + c1.messages.extend(c2.messages) + + return c1 + + def clear_server_messages(self): + self.messages.clear_server_messages() + + for entity in self: + entity.clear_server_messages() + + return self + + @staticmethod + def _dir_to_http_parts(root, d, upload): # @ReservedAssignment + ret = [] + x = (root + '/' + d if d is not None else root) + + for f in listdir(x): + if isdir(x + '/' + f): + part = MultipartParam( + name=hex(randint(0, sys.maxsize)), value="") + part.filename = upload + \ + ('/' + d + '/' if d is not None else '/') + f + '/' + ret.extend(Container._dir_to_http_parts( + root, (d + '/' + f if d is not None else f), upload)) + else: + part = MultipartParam.from_file( + paramname=hex(randint(0, sys.maxsize)), filename=x + '/' + f) + part.filename = upload + \ + ('/' + d + '/' if d is not None else '/') + f + ret.append(part) + + return ret + + def update(self, strict=False, raise_exception_on_error=True, + unique=True, sync=True, flags=None): + """Update these entites.""" + + if len(self) < 1: + te = TransactionError( + msg="There are no entities to be updated. 
This container is empty.", + container=self) + raise te + + self.clear_server_messages() + insert_xml = etree.Element("Update") + http_parts = [] + + if flags is None: + flags = {} + + if strict is True: + flags["strict"] = "true" + + if unique is True: + flags["uniquename"] = "true" + + for entity in self: + if (entity.id is None or entity.id < 0): + ee = EntityError( + "You tried to update an entity without a valid id.", + entity) + raise TransactionError(ee) + + self._linearize() + + for entity in self: + + # process files if present + Container._process_file_if_present_and_add_to_http_parts( + http_parts, entity) + + for entity in self: + entity_xml = entity.to_xml() + + if hasattr(entity, '_upload') and entity._upload is not None: + entity_xml.set("upload", entity._upload) + elif hasattr(entity, 'pickup') and entity.pickup is not None: + entity_xml.set("pickup", entity.pickup) + + insert_xml.append(entity_xml) + + _log_request("PUT: " + _ENTITY_URI_SEGMENT + + ('' if flags is None else "?" 
+ str(flags)), insert_xml) + + con = get_connection() + + if http_parts is not None and len(http_parts) > 0: + http_parts.insert( + 0, MultipartParam("FileRepresentation", xml2str(insert_xml))) + body, headers = multipart_encode(http_parts) + + http_response = con.update( + entity_uri_segment=[_ENTITY_URI_SEGMENT], + query_dict=flags, + body=body, + headers=headers) + else: + http_response = con.update( + entity_uri_segment=[_ENTITY_URI_SEGMENT], query_dict=flags, + body=xml2str(insert_xml)) + + cresp = Container._response_to_entities(http_response) + + if raise_exception_on_error: + raise_errors(cresp) + + if sync: + self._sync(cresp, unique=unique, + raise_exception_on_error=raise_exception_on_error) + + return self + else: + return cresp + + @staticmethod + def _process_file_if_present_and_add_to_http_parts(http_parts, entity): + if isinstance(entity, File) and hasattr( + entity, 'file') and entity.file is not None: + new_checksum = File._get_checksum(entity.file) + + # do not transfer unchanged files. 
+ + if entity._checksum is not None and entity._checksum.lower() == new_checksum.lower(): + entity._upload = None + + return + + entity._size = None + entity._checksum = new_checksum + entity._upload = hex(randint(0, sys.maxsize)) + + if hasattr(entity.file, "name"): + _file = entity.file.name + else: + _file = entity.file + + if isdir(_file): + http_parts.extend( + Container._dir_to_http_parts(_file, None, entity._upload)) + part = MultipartParam( + name=hex(randint(0, sys.maxsize)), value="") + part.filename = entity._upload + '/' + else: + part = MultipartParam.from_file( + paramname=hex(randint(0, sys.maxsize)), filename=_file) + part.filename = entity._upload + http_parts.append(part) + + if entity.thumbnail is not None: + part = MultipartParam.from_file(paramname=hex( + randint(0, sys.maxsize)), filename=entity.thumbnail) + part.filename = entity._upload + ".thumbnail" + http_parts.append(part) + else: + entity._checksum = None + + def insert(self, strict=False, raise_exception_on_error=True, + unique=True, sync=True, flags=None): + """Insert this file entity into LinkAhead. A successful insertion will + generate a new persistent ID for this entity. This entity can be + identified, retrieved, updated, and deleted via this ID until it has + been deleted. + + If the insertion fails, a LinkAheadException will be raised. The server will have returned at + least one error-message describing the reason why it failed in that case (call + <this_entity>.get_all_messages() in order to get these error-messages). + + Some insertions might cause warning-messages on the server-side, but the entities are inserted + anyway. Set the flag 'strict' to True in order to force the server to take all warnings as errors. + This prevents the server from inserting this entity if any warning occurs. + + @param strict=False: Flag for strict mode. + @param sync=True: synchronize this container with the response from the server. 
Otherwise, + this method returns a new container with the inserted entities and leaves this container untouched. + """ + + self.clear_server_messages() + insert_xml = etree.Element("Insert") + http_parts = [] + + if flags is None: + flags = {} + + if strict: + flags["strict"] = "true" + + if unique: + flags["uniquename"] = "true" + + self._linearize() + + # TODO: This is a possible solution for ticket#137 +# retrieved = Container() +# for entity in self: +# if entity.is_valid(): +# retrieved.append(entity) +# if len(retrieved)>0: +# retrieved = retrieved.retrieve(raise_exception_on_error=False, sync=False) +# for e_remote in retrieved: +# if e_remote.id is not None: +# try: +# self.get_entity_by_id(e_remote.id).is_valid=e_remote.is_valid +# continue +# except KeyError: +# pass +# if e_remote.name is not None: +# try: +# self.get_entity_by_name(e_remote.name).is_valid=e_remote.is_valid +# continue +# except KeyError: +# pass + for entity in self: + if entity.is_valid(): + continue + + # process files if present + Container._process_file_if_present_and_add_to_http_parts( + http_parts, entity) + + for entity in self: + if entity.is_valid(): + continue + entity_xml = entity.to_xml() + + if hasattr(entity, '_upload') and entity._upload is not None: + entity_xml.set("upload", entity._upload) + elif hasattr(entity, 'pickup') and entity.pickup is not None: + entity_xml.set("pickup", entity.pickup) + insert_xml.append(entity_xml) + + if len(self) > 0 and len(insert_xml) < 1: + te = TransactionError( + msg="There are no entities to be inserted. This container contains existent entities only.", + container=self) + raise te + _log_request("POST: " + _ENTITY_URI_SEGMENT + + ('' if flags is None else "?" 
+ str(flags)), insert_xml) + + con = get_connection() + + if http_parts is not None and len(http_parts) > 0: + http_parts.insert( + 0, MultipartParam("FileRepresentation", xml2str(insert_xml))) + + body, headers = multipart_encode(http_parts) + http_response = con.insert( + entity_uri_segment=[_ENTITY_URI_SEGMENT], + body=body, + headers=headers, + query_dict=flags) + else: + http_response = con.insert( + entity_uri_segment=[_ENTITY_URI_SEGMENT], + body=xml2str(insert_xml), + query_dict=flags) + + cresp = Container._response_to_entities(http_response) + + if sync: + + self._sync(cresp, unique=unique, + raise_exception_on_error=raise_exception_on_error) + + if raise_exception_on_error: + raise_errors(self) + + return self + else: + if raise_exception_on_error: + raise_errors(cresp) + + return cresp + + @staticmethod + def _get_smallest_tmpid(entity): + tmpid = 0 + + if entity.id is not None: + tmpid = min(tmpid, int(entity.id)) + + for p in entity.get_parents(): + if p.id is not None: + tmpid = min(tmpid, int(p.id)) + + for p in entity.get_properties(): + if p.id is not None: + tmpid = min(tmpid, Container._get_smallest_tmpid(p)) + + return tmpid + + def _linearize(self): + tmpid = 0 + ''' users might already have specified some tmpids. 
-> look for smallest.''' + + for e in self: + tmpid = min(tmpid, Container._get_smallest_tmpid(e)) + + tmpid -= 1 + + '''a tmpid for every entity''' + + for e in self: + if e.id is None: + e.id = tmpid + tmpid -= 1 + + # CUID + + if e._cuid is None or e._cuid == 'None' or e._cuid == '': + e._cuid = str(e.id) + "--" + str(uuid()) + + '''dereference properties and parents''' + + for e in self: + """properties.""" + + for p in e.get_properties(): + if p.id is None: + if p.name is not None: + # TODO using try except for normal execution flow is bad style + try: + w = self.get_entity_by_name(p.name) + p._wrap(w) + except KeyError: + pass + + '''parents''' + + for p in e.get_parents(): + if p.id is None: + if p.name is not None: + # TODO using try except for normal execution flow is bad style + try: + p._wrap(self.get_entity_by_name(p.name)) + except KeyError: + pass + + return self + + def get_property_values(self, *selectors): + """ Return a list of tuples with values of the given selectors. + + I.e. a tabular representation of the container's content. + + If the elements of the selectors parameter are tuples, they will return + the properties of the referenced entity, if present. E.g. ("window", + "height") will return the value of the height property of the + referenced window entity. + + All tuples of the returned list have the same length as the selectors + parameter and the ordering of the tuple's values correspond to the + order of the parameter as well. + + The tuple contains None for all values that are not available in the + entity. That does not necessarily mean, that the values are not stored + in the database (e.g. if a single entity was retrieved without + referenced entities). + + Parameters + ---------- + *selectors : str or tuple of str + Each selector is a list or tuple of property names, e.g. `"height", + "width"`. + + Returns + ------- + table : list of tuples + A tabular representation of the container's content. 
+ """ + table = [] + + for e in self: + table.append(e.get_property_values(*selectors)) + + return table + + +def sync_global_acl(): + c = get_connection() + http_response = c.retrieve(entity_uri_segments=["EntityPermissions"]) + body = http_response.read() + _log_response(body) + + xml = etree.fromstring(body) + + if xml.tag.lower() == "response": + for child in xml: + if child.tag == "EntityPermissions": + Permissions.known_permissions = Permissions(child) + + for pelem in child: + if pelem.tag == "EntityACL": + ACL.global_acl = ACL(xml=pelem) + else: + raise LinkAheadException( + "The server's response didn't contain the expected elements. The configuration of this client might be invalid (especially the url).") + + +def get_known_permissions(): + if Permissions.known_permissions is None: + sync_global_acl() + + return Permissions.known_permissions + + +def get_global_acl(): + if ACL.global_acl is None: + sync_global_acl() + + return ACL.global_acl + + +class ACI(): + def __init__(self, realm, username, role, permission): + self.role = role + self.username = username + self.realm = realm + self.permission = permission + + def __hash__(self): + return hash(self.__repr__()) + + def __eq__(self, other): + return isinstance(other, ACI) and (self.role is None and self.username == other.username and self.realm == + other.realm) or self.role == other.role and self.permission == other.permission + + def __repr__(self): + return str(self.realm) + ":" + str(self.username) + ":" + str(self.role) + ":" + str(self.permission) + + def add_to_element(self, e): + if self.role is not None: + e.set("role", self.role) + else: + e.set("username", self.username) + + if self.realm is not None: + e.set("realm", self.realm) + p = etree.Element("Permission") + p.set("name", self.permission) + e.append(p) + + +class ACL(): + + global_acl = None + + def __init__(self, xml=None): + if xml is not None: + self.parse_xml(xml) + else: + self.clear() + + def parse_xml(self, xml): + """Clear 
this ACL and parse the xml. + + Iterate over the rules in the xml and add each rule to this ACL. + + Contradicting rules will both be kept. + + Parameters + ---------- + xml : lxml.etree.Element + The xml element containing the ACL rules, i.e. <Grant> and <Deny> + rules. + """ + self.clear() + self._parse_xml(xml) + + def _parse_xml(self, xml): + """Parse the xml. + + Iterate over the rules in the xml and add each rule to this ACL. + + Contradicting rules will both be kept. + + Parameters + ---------- + xml : lxml.etree.Element + The xml element containing the ACL rules, i.e. <Grant> and <Deny> + rules. + """ + # @review Florian Spreckelsen 2022-03-17 + for e in xml: + role = e.get("role") + username = e.get("username") + realm = e.get("realm") + priority = e.get("priority") + + for p in e: + if p.tag == "Permission": + permission = p.get("name") + + if e.tag == "Grant": + self.grant(username=username, realm=realm, role=role, + permission=permission, priority=priority, + revoke_denial=False) + elif e.tag == "Deny": + self.deny(username=username, realm=realm, role=role, + permission=permission, priority=priority, + revoke_grant=False) + + def combine(self, other): + """ Combine and return new instance.""" + result = ACL() + result._grants.update(other._grants) + result._grants.update(self._grants) + result._denials.update(other._denials) + result._denials.update(self._denials) + result._priority_grants.update(other._priority_grants) + result._priority_grants.update(self._priority_grants) + result._priority_denials.update(other._priority_denials) + result._priority_denials.update(self._priority_denials) + + return result + + def __eq__(self, other): + return isinstance(other, ACL) and other._grants == self._grants and self._denials == other._denials and self._priority_grants == other._priority_grants and self._priority_denials == other._priority_denials + + def is_empty(self): + return len(self._grants) + len(self._priority_grants) + \ + len(self._priority_denials) + 
len(self._denials) == 0 + + def clear(self): + self._grants = set() + self._denials = set() + self._priority_grants = set() + self._priority_denials = set() + + def _get_boolean_priority(self, priority): + return str(priority).lower() in ["true", "1", "yes", "y"] + + def _remove_item(self, item, priority): + try: + self._denials.remove(item) + except KeyError: + pass + try: + self._grants.remove(item) + except KeyError: + pass + + if priority: + try: + self._priority_denials.remove(item) + except KeyError: + pass + try: + self._priority_grants.remove(item) + except KeyError: + pass + + def revoke_grant(self, username=None, realm=None, + role=None, permission=None, priority=False): + priority = self._get_boolean_priority(priority) + item = ACI(role=role, username=username, + realm=realm, permission=permission) + + if priority: + if item in self._priority_grants: + self._priority_grants.remove(item) + + if item in self._grants: + self._grants.remove(item) + + def revoke_denial(self, username=None, realm=None, + role=None, permission=None, priority=False): + priority = self._get_boolean_priority(priority) + item = ACI(role=role, username=username, + realm=realm, permission=permission) + + if priority: + if item in self._priority_denials: + self._priority_denials.remove(item) + + if item in self._denials: + self._denials.remove(item) + + def grant(self, permission, username=None, realm=None, role=None, + priority=False, revoke_denial=True): + """Grant a permission to a user or role. + + You must specify either only the username and the realm, or only the + role. + + By default a previously existing denial rule would be revoked, because + otherwise this grant wouldn't have any effect. However, for keeping + contradicting rules pass revoke_denial=False. + + Parameters + ---------- + permission: str + The permission to be granted. + username : str, optional + The username. Exactly one is required, either the `username` or the + `role`. 
+ realm: str, optional + The user's realm. Required when username is not None. + role: str, optional + The role (as in Role-Based Access Control). Exactly one is + required, either the `username` or the `role`. + priority: bool, default False + Whether this permission is granted with priority over non-priority + rules. + revoke_denial: bool, default True + Whether a contradicting denial (with same priority flag) in this + ACL will be revoked. + """ + # @review Florian Spreckelsen 2022-03-17 + priority = self._get_boolean_priority(priority) + item = ACI(role=role, username=username, + realm=realm, permission=permission) + if revoke_denial: + self._remove_item(item, priority) + + if priority is True: + self._priority_grants.add(item) + else: + self._grants.add(item) + + def deny(self, username=None, realm=None, role=None, + permission=None, priority=False, revoke_grant=True): + """Deny a permission to a user or role for this entity. + + You must specify either only the username and the realm, or only the + role. + + By default a previously existing grant rule would be revoked, because + otherwise this denial would override the grant rules anyways. However, + for keeping contradicting rules pass revoke_grant=False. + + Parameters + ---------- + permission: str + The permission to be denied. + username : str, optional + The username. Exactly one is required, either the `username` or the + `role`. + realm: str, optional + The user's realm. Required when username is not None. + role: str, optional + The role (as in Role-Based Access Control). Exactly one is + required, either the `username` or the `role`. + priority: bool, default False + Whether this permission is denied with priority over non-priority + rules. + revoke_grant: bool, default True + Whether a contradicting grant (with same priority flag) in this + ACL will be revoked. 
+ """ + # @review Florian Spreckelsen 2022-03-17 + priority = self._get_boolean_priority(priority) + item = ACI(role=role, username=username, + realm=realm, permission=permission) + if revoke_grant: + self._remove_item(item, priority) + + if priority is True: + self._priority_denials.add(item) + else: + self._denials.add(item) + + def to_xml(self, xml=None): + if xml is None: + xml = etree.Element("EntityACL") + + for aci in self._grants: + e = etree.Element("Grant") + e.set("priority", "False") + aci.add_to_element(e) + xml.append(e) + + for aci in self._denials: + e = etree.Element("Deny") + e.set("priority", "False") + aci.add_to_element(e) + xml.append(e) + + for aci in self._priority_grants: + e = etree.Element("Grant") + e.set("priority", "True") + aci.add_to_element(e) + xml.append(e) + + for aci in self._priority_denials: + e = etree.Element("Deny") + e.set("priority", "True") + aci.add_to_element(e) + xml.append(e) + + return xml + + def get_acl_for_role(self, role): + ret = ACL() + + for aci in self._grants: + if aci.role == role: + ret._grants.add(aci) + + for aci in self._denials: + if aci.role == role: + ret._denials.add(aci) + + for aci in self._priority_grants: + if aci.role == role: + ret._priority_grants.add(aci) + + for aci in self._priority_denials: + if aci.role == role: + ret._priority_denials.add(aci) + + return ret + + def get_acl_for_user(self, username, realm=None): + ret = ACL() + + for aci in self._grants: + if aci.username == username and ( + realm is None or aci.realm == realm): + ret._grants.add(aci) + + for aci in self._denials: + if aci.username == username and ( + realm is None or aci.realm == realm): + ret._denials.add(aci) + + for aci in self._priority_grants: + if aci.username == username and ( + realm is None or aci.realm == realm): + ret._priority_grants.add(aci) + + for aci in self._priority_denials: + if aci.username == username and ( + realm is None or aci.realm == realm): + ret._priority_denials.add(aci) + + return ret + + 
def get_permissions_for_user(self, username, realm=None): + acl = self.get_acl_for_user(username, realm) + _grants = set() + + for aci in acl._grants: + _grants.add(aci.permission) + _denials = set() + + for aci in acl._denials: + _denials.add(aci.permission) + _priority_grants = set() + + for aci in acl._priority_grants: + _priority_grants.add(aci.permission) + _priority_denials = set() + + for aci in acl._priority_denials: + _priority_denials.add(aci.permission) + + return ((_grants - _denials) | _priority_grants) - _priority_denials + + def get_permissions_for_role(self, role): + acl = self.get_acl_for_role(role) + _grants = set() + + for aci in acl._grants: + _grants.add(aci.permission) + _denials = set() + + for aci in acl._denials: + _denials.add(aci.permission) + _priority_grants = set() + + for aci in acl._priority_grants: + _priority_grants.add(aci.permission) + _priority_denials = set() + + for aci in acl._priority_denials: + _priority_denials.add(aci.permission) + + return ((_grants - _denials) | _priority_grants) - _priority_denials + + def is_permitted(self, role, permission): + return permission in self.get_permissions_for_role(role) + + def __repr__(self): + return xml2str(self.to_xml()) + + +class Query(): + """Query + + Attributes + ---------- + q : str + The query string. + flags : dict of str + A dictionary of flags to be send with the query request. + messages : Messages() + A container of messages included in the last query response. + cached : bool + indicates whether the server used the query cache for the execution of + this query. + results : int or Container + The number of results (when this was a count query) or the container + with the resulting entities. 
+ """ + + def putFlag(self, key, value=None): + self.flags[key] = value + + return self + + def removeFlag(self, key): + return self.flags.pop(key) + + def getFlag(self, key): + return self.flags.get(key) + + def __init__(self, q): + self.flags = dict() + self.messages = Messages() + self.cached = None + self.etag = None + + if isinstance(q, etree._Element): + self.q = q.get("string") + self.results = int(q.get("results")) + + if q.get("cached") is None: + self.cached = False + else: + self.cached = q.get("cached").lower() == "true" + self.etag = q.get("etag") + + for m in q: + if m.tag.lower() == 'warning' or m.tag.lower() == 'error': + self.messages.append(_parse_single_xml_element(m)) + else: + self.q = q + + def _query_request(self, query_dict): + """Used internally to execute the query request...""" + _log_request("GET Entity?" + str(query_dict), None) + connection = get_connection() + http_response = connection.retrieve( + entity_uri_segments=["Entity"], + query_dict=query_dict) + cresp = Container._response_to_entities(http_response) + return cresp + + def _paging_generator(self, first_page, query_dict, page_length): + """Used internally to create a generator of pages instead instead of a + container which contais all the results.""" + if len(first_page) == 0: + return # empty page + yield first_page + index = page_length + while self.results > index: + query_dict["P"] = f"{index}L{page_length}" + next_page = self._query_request(query_dict) + etag = next_page.query.etag + if etag is not None and etag != self.etag: + raise PagingConsistencyError("The database state changed while retrieving the pages") + yield next_page + index += page_length + + def execute(self, unique=False, raise_exception_on_error=True, cache=True, + page_length=None): + """Execute a query (via a server-requests) and return the results. + + Parameters + ---------- + + unique : bool + Whether the query is expected to have only one entity as result. + Defaults to False. 
+ raise_exception_on_error : bool + Whether an exception should be raises when there are errors in the + resulting entities. Defaults to True. + cache : bool + Whether to use the server-side query cache (equivalent to adding a + "cache" flag) to the Query object. Defaults to True. + page_length : int + Whether to use paging. If page_length > 0 this method returns a + generator (to be used in a for-loop or with list-comprehension). + The generator yields containers with up to page_length entities. + Otherwise, paging is disabled, as well as for count queries and + when unique is True. Defaults to None. + + Raises: + ------- + PagingConsistencyError + If the database state changed between paged requests. + + Yields + ------ + page : Container + Returns a container with the next `page_length` resulting entities. + + Returns + ------- + results : Container or integer + Returns an integer when it was a `COUNT` query. Otherwise, returns a + Container with the resulting entities. + """ + flags = self.flags + + if cache is False: + flags["cache"] = "false" + query_dict = dict(flags) + query_dict["query"] = str(self.q) + + has_paging = False + is_count_query = self.q.split()[0].lower() == "count" if len(self.q.split()) > 0 else False + + if not unique and not is_count_query and page_length is not None and page_length > 0: + has_paging = True + query_dict["P"] = f"0L{page_length}" + + # retreive first/only page + cresp = self._query_request(query_dict) + + self.results = cresp.query.results + self.cached = cresp.query.cached + self.etag = cresp.query.etag + + if is_count_query: + return self.results + + if raise_exception_on_error: + raise_errors(cresp) + + if unique: + if len(cresp) > 1 and raise_exception_on_error: + raise QueryNotUniqueError( + "Query '{}' wasn't unique.".format(self.q)) + + if len(cresp) == 0 and raise_exception_on_error: + raise EmptyUniqueQueryError( + "Query '{}' found no results.".format(self.q)) + + if len(cresp) == 1: + r = cresp[0] + 
r.messages.extend(cresp.messages) + + return r + self.messages = cresp.messages + + if has_paging: + return self._paging_generator(cresp, query_dict, page_length) + else: + return cresp + + +def execute_query(q, unique=False, raise_exception_on_error=True, cache=True, + flags=None, page_length=None): + """Execute a query (via a server-requests) and return the results. + + Parameters + ---------- + + q : str + The query string. + unique : bool + Whether the query is expected to have only one entity as result. + Defaults to False. + raise_exception_on_error : bool + Whether an exception should be raised when there are errors in the + resulting entities. Defaults to True. + cache : bool + Whether to use the query server-side cache (equivalent to adding a + "cache" flag). Defaults to True. + flags : dict of str + Flags to be added to the request. + page_length : int + Whether to use paging. If page_length > 0 this method returns a + generator (to be used in a for-loop or with list-comprehension). + The generator yields containers with up to page_length entities. + Otherwise, paging is disabled, as well as for count queries and + when unique is True. Defaults to None. + + Raises: + ------- + PagingConsistencyError + If the database state changed between paged requests. + + Yields + ------ + page : Container + Returns a container with the next `page_length` resulting entities. + + Returns + ------- + results : Container or integer + Returns an integer when it was a `COUNT` query. Otherwise, returns a + Container with the resulting entities. 
+    """
+    query = Query(q)
+
+    if flags is not None:
+        query.flags = flags
+
+    return query.execute(unique=unique,
+                         raise_exception_on_error=raise_exception_on_error,
+                         cache=cache, page_length=page_length)
+
+
+class DropOffBox(list):
+    def __init__(self, *args, **kwargs):
+        warn(DeprecationWarning(
+            "The DropOffBox is deprecated and will be removed in future."))
+        super().__init__(*args, **kwargs)
+
+    path = None
+
+    def sync(self):
+        c = get_connection()
+        _log_request("GET: Info")
+        http_response = c.retrieve(["Info"])
+        body = http_response.read()
+        _log_response(body)
+
+        xml = etree.fromstring(body)
+
+        for child in xml:
+            if child.tag.lower() == "stats":
+                infoelem = child
+
+                break
+
+        for child in infoelem:
+            if child.tag.lower() == "dropoffbox":
+                dropoffboxelem = child
+
+                break
+        del self[:]
+        self.path = dropoffboxelem.get('path')
+
+        for f in dropoffboxelem:
+            self.append(f.get('path'))
+
+        return self
+
+
+class UserInfo():
+
+    def __init__(self, xml):
+        self.roles = [role.text for role in xml.findall("Roles/Role")]
+        self.name = xml.get("username")
+        self.realm = xml.get("realm")
+
+
+class Info():
+
+    def __init__(self):
+        self.messages = Messages()
+        self.sync()
+
+    def sync(self):
+        c = get_connection()
+        try:
+            http_response = c.retrieve(["Info"])
+        except LinkAheadConnectionError as conn_e:
+            print(conn_e)
+
+            return
+
+        xml = etree.fromstring(http_response.read())
+
+        for e in xml:
+            m = _parse_single_xml_element(e)
+
+            if isinstance(m, UserInfo):
+                self.user_info = m
+            elif isinstance(m, TimeZone):
+                self.time_zone = m
+            else:
+                self.messages.append(m)
+
+    def __str__(self):
+        if "Counts" not in [m.type for m in self.messages]:
+            return "linkahead.Info"
+
+        if int(self.messages["counts"]["records"]) > 0:
+            return "Connection to LinkAhead with {} Records."
.format( + self.messages["counts"]["records"] + ) + else: + return "Connection to LinkAhead without Records." + + def __repr__(self): + return self.__str__() + + +class Permission(): + + def __init__(self, name, description=None): + self.name = name + self.description = description + + def __repr__(self): + return str(self) + + def __str__(self): + return self.name + + def __eq__(self, p): + if isinstance(p, Permission): + return p.name == self.name + + return False + + def __hash__(self): + return hash(self.name) + + +class Permissions(): + + known_permissions = None + + def __init__(self, xml): + self.parse_xml(xml) + + def clear(self): + self._perms = set() + + def parse_xml(self, xml): + self.clear() + + for e in xml: + if e.tag == "Permission": + self._perms.add(Permission(name=e.get("name"), + description=e.get("description"))) + + def __contains__(self, p): + if isinstance(p, Permission): + return p in self._perms + else: + return Permission(name=p) in self._perms + + def __repr__(self): + return str(self) + + def __str__(self): + return str(self._perms) + + +def parse_xml(xml): + """parse a string or tree representation of an xml document to a set of + entities (records, recordtypes, properties, or files). + + @param xml: a string or tree representation of an xml document. + @return: list of entities or single entity. 
+ """ + + if isinstance(xml, etree._Element): + elem = xml + else: + elem = etree.fromstring(xml) + + return _parse_single_xml_element(elem) + + +def _parse_single_xml_element(elem): + classmap = { + 'record': Record, + 'recordtype': RecordType, + 'property': Property, + 'file': File, + 'parent': Parent, + 'entity': Entity} + + if elem.tag.lower() in classmap: + klass = classmap.get(elem.tag.lower()) + entity = klass() + Entity._from_xml(entity, elem) + + return entity + elif elem.tag.lower() == "version": + return Version.from_xml(elem) + elif elem.tag.lower() == "state": + return State.from_xml(elem) + elif elem.tag.lower() == "emptystring": + return "" + elif elem.tag.lower() == "value": + if len(elem) == 1 and elem[0].tag.lower() == "emptystring": + return "" + elif len(elem) == 1 and elem[0].tag.lower() in classmap: + return _parse_single_xml_element(elem[0]) + elif elem.text is None or elem.text.strip() == "": + return None + + return str(elem.text.strip()) + elif elem.tag.lower() == "querytemplate": + return QueryTemplate._from_xml(elem) + elif elem.tag.lower() == 'query': + return Query(elem) + elif elem.tag.lower() == 'history': + return Message(type='History', description=elem.get("transaction")) + elif elem.tag.lower() == 'stats': + counts = elem.find("counts") + + return Message(type="Counts", description=None, body=counts.attrib) + elif elem.tag == "EntityACL": + return ACL(xml=elem) + elif elem.tag == "Permissions": + return Permissions(xml=elem) + elif elem.tag == "UserInfo": + return UserInfo(xml=elem) + elif elem.tag == "TimeZone": + return TimeZone(zone_id=elem.get("id"), offset=elem.get("offset"), + display_name=elem.text.strip()) + else: + return Message(type=elem.tag, code=elem.get( + "code"), description=elem.get("description"), body=elem.text) + + +def _evaluate_and_add_error(parent_error, ent): + """Evaluate the error message(s) attached to entity and add a + corresponding exception to parent_error. 
+ + Parameters: + ----------- + parent_error : TransactionError + Parent error to which the new exception will be attached. This + exception will be a direct child. + ent : Entity + Entity that caused the TransactionError. An exception is + created depending on its error message(s). + + Returns: + -------- + TransactionError : + Parent error with new exception(s) attached to it. + + """ + + if isinstance(ent, (Entity, QueryTemplate)): + # Check all error messages + found114 = False + found116 = False + + for err in ent.get_errors(): + # Evaluate specific EntityErrors depending on the error + # code + + if err.code is not None: + if int(err.code) == 101: # ent doesn't exist + new_exc = EntityDoesNotExistError(entity=ent, + error=err) + elif int(err.code) == 110: # ent has no data type + new_exc = EntityHasNoDatatypeError(entity=ent, + error=err) + elif int(err.code) == 403: # no permission + new_exc = AuthorizationError(entity=ent, + error=err) + elif int(err.code) == 152: # name wasn't unique + new_exc = UniqueNamesError(entity=ent, error=err) + elif int(err.code) == 114: # unqualified properties + found114 = True + new_exc = UnqualifiedPropertiesError(entity=ent, + error=err) + + for prop in ent.get_properties(): + new_exc = _evaluate_and_add_error(new_exc, + prop) + elif int(err.code) == 116: # unqualified parents + found116 = True + new_exc = UnqualifiedParentsError(entity=ent, + error=err) + + for par in ent.get_parents(): + new_exc = _evaluate_and_add_error(new_exc, + par) + else: # General EntityError for other codes + new_exc = EntityError(entity=ent, error=err) + else: # No error code causes a general EntityError, too + new_exc = EntityError(entity=ent, error=err) + parent_error.add_error(new_exc) + # Check for possible errors in parents and properties that + # weren't detected up to here + + if not found114: + dummy_err = EntityError(entity=ent) + + for prop in ent.get_properties(): + dummy_err = _evaluate_and_add_error(dummy_err, prop) + + if 
dummy_err.errors: + parent_error.add_error(dummy_err) + + if not found116: + dummy_err = EntityError(entity=ent) + + for par in ent.get_parents(): + dummy_err = _evaluate_and_add_error(dummy_err, par) + + if dummy_err.errors: + parent_error.add_error(dummy_err) + + elif isinstance(ent, Container): + parent_error.container = ent + + if ent.get_errors() is not None: + parent_error.code = ent.get_errors()[0].code + # In the highly unusual case of more than one error + # message, attach all of them. + parent_error.msg = '\n'.join( + [x.description for x in ent.get_errors()]) + # Go through all container elements and add them: + + for elt in ent: + parent_error = _evaluate_and_add_error(parent_error, elt) + + else: + raise TypeError("Parameter ent is to be an Entity or a Container") + + return parent_error + + +def raise_errors(arg0): + """Raise a TransactionError depending on the error code(s) inside + Entity, QueryTemplate or Container arg0. More detailed errors may + be attached to the TransactionError depending on the contents of + arg0. + + Parameters: + ----------- + arg0 : Entity, QueryTemplate, or Container + LinkAhead object whose messages are evaluated according to their + error codes + + """ + transaction_error = _evaluate_and_add_error(TransactionError(), + arg0) + # Raise if any error was found + + if len(transaction_error.all_errors) > 0: + raise transaction_error + # Cover the special case of an empty container with error + # message(s) (e.g. 
query syntax error) + + if (transaction_error.container is not None and + transaction_error.container.has_errors()): + raise transaction_error + + +def delete(ids, raise_exception_on_error=True): + c = Container() + + if isinstance(ids, list) or isinstance(ids, range): + for i in ids: + c.append(Entity(id=i)) + else: + c.append(Entity(id=ids)) + + return c.delete(raise_exception_on_error=raise_exception_on_error) diff --git a/src/linkahead/common/state.py b/src/linkahead/common/state.py new file mode 100644 index 0000000000000000000000000000000000000000..82f314e80191163f14a5c4babdd749f977f2901b --- /dev/null +++ b/src/linkahead/common/state.py @@ -0,0 +1,198 @@ +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> +# Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header + +import copy +from lxml import etree + + +def _translate_to_state_acis(acis): + result = set() + for aci in acis: + aci = copy.copy(aci) + if aci.role: + aci.role = "?STATE?" + aci.role + "?" + result.add(aci) + return result + + +class Transition: + """Transition + + Represents allowed transitions from one state to another. 
+ + Properties + ---------- + name : str + The name of the transition + description: str + The description of the transition + from_state : str + A state name + to_state : str + A state name + """ + + def __init__(self, name, from_state, to_state, description=None): + self._name = name + self._from_state = from_state + self._to_state = to_state + self._description = description + + @property + def name(self): + return self._name + + @property + def description(self): + return self._description + + @property + def from_state(self): + return self._from_state + + @property + def to_state(self): + return self._to_state + + def __repr__(self): + return f'Transition(name="{self.name}", from_state="{self.from_state}", to_state="{self.to_state}", description="{self.description}")' + + def __eq__(self, other): + return (isinstance(other, Transition) + and other.name == self.name + and other.to_state == self.to_state + and other.from_state == self.from_state) + + def __hash__(self): + return 23472 + hash(self.name) + hash(self.from_state) + hash(self.to_state) + + @staticmethod + def from_xml(xml): + to_state = [to.get("name") for to in xml + if to.tag.lower() == "tostate"] + from_state = [from_.get("name") for from_ in xml + if from_.tag.lower() == "fromstate"] + result = Transition(name=xml.get("name"), + description=xml.get("description"), + from_state=from_state[0] if from_state else None, + to_state=to_state[0] if to_state else None) + return result + + +class State: + """State + + Represents the state of an entity and take care of the serialization and + deserialization of xml for the entity state. + + An entity state is always a State of a StateModel. 
+ + Properties + ---------- + name : str + Name of the State + model : str + Name of the StateModel + description : str + Description of the State (read-only) + id : str + Id of the undelying State record (read-only) + transitions : set of Transition + All transitions which are available from this state (read-only) + """ + + def __init__(self, model, name): + self.name = name + self.model = model + self._id = None + self._description = None + self._transitions = None + + @property + def id(self): + return self._id + + @property + def description(self): + return self._description + + @property + def transitions(self): + return self._transitions + + def __eq__(self, other): + return (isinstance(other, State) + and self.name == other.name + and self.model == other.model) + + def __hash__(self): + return hash(self.name) + hash(self.model) + + def __repr__(self): + return f"State('{self.model}', '{self.name}')" + + def to_xml(self): + """Serialize this State to xml. + + Returns + ------- + xml : etree.Element + """ + xml = etree.Element("State") + if self.name is not None: + xml.set("name", self.name) + if self.model is not None: + xml.set("model", self.model) + return xml + + @staticmethod + def from_xml(xml): + """Create a new State instance from an xml Element. 
+ + Parameters + ---------- + xml : etree.Element + + Returns + ------- + state : State + """ + name = xml.get("name") + model = xml.get("model") + result = State(name=name, model=model) + result._id = xml.get("id") + result._description = xml.get("description") + transitions = [Transition.from_xml(t) for t in xml if t.tag.lower() == + "transition"] + if transitions: + result._transitions = set(transitions) + + return result + + @staticmethod + def create_state_acl(acl): + from .models import ACL + state_acl = ACL() + state_acl._grants = _translate_to_state_acis(acl._grants) + state_acl._denials = _translate_to_state_acis(acl._denials) + state_acl._priority_grants = _translate_to_state_acis(acl._priority_grants) + state_acl._priority_denials = _translate_to_state_acis(acl._priority_denials) + return state_acl diff --git a/src/linkahead/common/timezone.py b/src/linkahead/common/timezone.py new file mode 100644 index 0000000000000000000000000000000000000000..8fc5e710d3cbf6f20cf81397573f972db3b22f12 --- /dev/null +++ b/src/linkahead/common/timezone.py @@ -0,0 +1,19 @@ +class TimeZone(): + """ + TimeZone, e.g. CEST, Europe/Berlin, UTC+4. + + + Attributes + ---------- + zone_id : string + ID of the time zone. + offset : int + Offset to UTC in seconds. + display_name : string + A human-friendly name of the time zone: + """ + + def __init__(self, zone_id, offset, display_name): + self.zone_id = zone_id + self.offset = offset + self.display_name = display_name diff --git a/src/linkahead/common/utils.py b/src/linkahead/common/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b333e3e07253397e54f3e2830a7cdcce42fa2e1e --- /dev/null +++ b/src/linkahead/common/utils.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. 
+# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# + +"""mising docstring.""" +from lxml import etree +from multiprocessing import Lock +from uuid import uuid4 +_uuid_lock = Lock() + + +def xml2str(xml): + return etree.tostring(xml, pretty_print=True, encoding='unicode') + + +def uuid(): + exc = None + ret = None + try: + _uuid_lock.acquire() + ret = uuid4() + except Exception as e: + exc = e + finally: + _uuid_lock.release() + if exc: + raise exc + return ret + + +def is_int(obj): + try: + int(obj) + return True + except ValueError: + return False diff --git a/src/linkahead/common/versioning.py b/src/linkahead/common/versioning.py new file mode 100644 index 0000000000000000000000000000000000000000..facfbc488e413e090ea1a856501ccd96334f8354 --- /dev/null +++ b/src/linkahead/common/versioning.py @@ -0,0 +1,250 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. 
+# +# Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> +# Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# +""" Versioning module for anything related to entity versions. + +Currently this module defines nothing but a single class, `Version`. +""" + +from __future__ import absolute_import +from .utils import xml2str +from lxml import etree + + +class Version(): + """The version of an entity. + + An entity version has a version id (string), a date (UTC timestamp), a + list of predecessors and a list of successors. + + Parameters + ---------- + id : str, optional + See attribute `id`. Default: None + date : str, optional + See attribute `date`. Default: None + username : str, optional + See attribute `username`. Default: None + realm : str, optional + See attribute `realm`. Default: None + predecessors : list of Version, optional + See attribute `predecessors`. Default: empty list. + successors : list of Version, optional + See attribute `successors`. Default: empty list. + is_head : bool + See attribute `is_head`. Default: False + is_complete_history : bool + See attribute `is_complete_history`. Default: False + + Attributes + ---------- + id : str + Version ID (not the entity's id). + date : str + UTC Timestamp of the version, i.e. 
the date and time when the entity of + this version has been inserted or modified. + username : str + The username of the user who inserted or updated this version. + realm : str + The realm of the user who inserted or updated this version. + predecessors : list of Version + Predecessors are the older entity versions which have been modified + into this version. Usually, there is only one predecessor. However, + this API allows for entities to be merged into one entity, which would + result in more than one predecessor. + successors : list of Version + Successors are newer versions of this entity. If there are successors, + this version is not the latest version of this entity. Usually, there + is only one successor. However, this API allows that a single entity + may co-exist in several versions (e.g. several proposals for the next + entity status). That would result in more than one successor. + is_head : bool or string + If true, this indicates that this version is the HEAD if true. + Otherwise it is not known whether this is the head or not. Any string + matching "true" (case-insensitively) is regarded as True. + Nota bene: This property should typically be set if the server response + indicated that this is the head version. + is_complete_history : bool or string + If true, this indicates that this version contains the full version + history. That means, that the predecessors and successors have their + respective predecessors and successors attached as well and the tree is + completely available. Any string matching "true" (case-insensitively) + is regarded as True. + Nota bene: This property should typically be set if the server response + indicated that the full version history is included in its response. 
+ """ + + # pylint: disable=redefined-builtin + def __init__(self, id=None, date=None, username=None, realm=None, + predecessors=None, successors=None, is_head=False, + is_complete_history=False): + """Typically the `predecessors` or `successors` should not "link back" to an existing Version +object.""" + self.id = id + self.date = date + self.username = username + self.realm = realm + self.predecessors = predecessors if predecessors is not None else [] + self.successors = successors if successors is not None else [] + self.is_head = str(is_head).lower() == "true" + self.is_complete_history = str(is_complete_history).lower() == "true" + + def get_history(self): + """ Returns a flat list of Version instances representing the history + of the entity. + + The list items are ordered by the relation between the versions, + starting with the oldest version. + + The items in the list have no predecessors or successors attached. + + Note: This method only returns reliable results if + `self.is_complete_history is True` and it will not retrieve the full + version history if it is not present. + + Returns + ------- + list of Version + """ + versions = [] + for p in self.predecessors: + # assuming that predecessors don't have any successors + versions = p.get_history() + versions.append(Version(id=self.id, date=self.date, + username=self.username, realm=self.realm)) + for s in self.successors: + # assuming that successors don't have any predecessors + versions.extend(s.get_history()) + return versions + + def to_xml(self, tag="Version"): + """Serialize this version to xml. + + The tag name is 'Version' per default. But since this method is called + recursively for the predecessors and successors as well, the tag name + can be configured. + + The resulting xml element contains attributes 'id' and 'date' and + 'Predecessor' and 'Successor' child elements. + + Parameters + ---------- + tag : str, optional + The name of the returned xml element. Defaults to 'Version'. 
+ + Returns + ------- + xml : etree.Element + """ + xml = etree.Element(tag) + if self.id is not None: + xml.set("id", self.id) + if self.date is not None: + xml.set("date", self.date) + if self.username is not None: + xml.set("username", self.username) + if self.realm is not None: + xml.set("realm", self.realm) + if self.predecessors is not None: + for p in self.predecessors: + xml.append(p.to_xml(tag="Predecessor")) + if self.is_head is True: + xml.set("head", "true") + if self.successors is not None: + for s in self.successors: + xml.append(s.to_xml(tag="Successor")) + return xml + + def __str__(self): + """Return a stringified xml representation.""" + return self.__repr__() + + def __repr__(self): + """Return a stringified xml representation.""" + return xml2str(self.to_xml()) + + @staticmethod + def from_xml(xml): + """Parse a version object from a 'Version' xml element. + + Parameters + ---------- + xml : etree.Element + A 'Version' xml element, with 'id', possibly 'date', `username`, + `realm`, and `head` attributes as well as 'Predecessor' and + 'Successor' child elements. + + Returns + ------- + version : Version + a new version instance + """ + predecessors = [Version.from_xml(p) for p in xml if p.tag.lower() == "predecessor"] + successors = [Version.from_xml(s) for s in xml if s.tag.lower() == "successor"] + return Version(id=xml.get("id"), date=xml.get("date"), + is_head=xml.get("head"), + is_complete_history=xml.get("completeHistory"), + username=xml.get("username"), realm=xml.get("realm"), + predecessors=predecessors, successors=successors) + + def __hash__(self): + """Hash of the version instance. + + Also hashes the predecessors and successors. 
+ """ + return (hash(self.id) + + hash(self.date) + + (Version._hash_list(self.predecessors) + if self.predecessors else 26335) + + (Version._hash_list(self.successors) + if self.successors else -23432)) + + @staticmethod + def _hash_list(_list): + """Hash a list by hashing each element and its index.""" + result = 12352 + for idx, val in enumerate(_list): + result += hash(val) + idx + return result + + @staticmethod + def _eq_list(this, that): + """List equality. + + List equality is defined as equality of each element, the order + and length. + """ + if len(this) != len(that): + return False + for v1, v2 in zip(this, that): + if v1 != v2: + return False + return True + + def __eq__(self, other): + """Equality of versions is defined by equality of id, date, and list + equality of the predecessors and successors.""" + return (self.id == other.id + and self.date == other.date + and Version._eq_list(self.predecessors, other.predecessors) + and Version._eq_list(self.successors, other.successors)) diff --git a/src/linkahead/configuration.py b/src/linkahead/configuration.py new file mode 100644 index 0000000000000000000000000000000000000000..81642fd8926bb1ab5dae64ad5032eab54c63b559 --- /dev/null +++ b/src/linkahead/configuration.py @@ -0,0 +1,143 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# + +import os +import warnings + +import yaml + +try: + optional_jsonschema_validate = None + from jsonschema import validate as optional_jsonschema_validate +except ImportError: + pass + +from configparser import ConfigParser +from os import environ, getcwd +from os.path import expanduser, isfile, join + + +def _reset_config(): + global _pycaosdbconf + _pycaosdbconf = ConfigParser(allow_no_value=False) + + +def configure(inifile): + """read config from file. + + Return a list of files which have successfully been parsed. + """ + global _pycaosdbconf + if "_pycaosdbconf" not in globals(): + _pycaosdbconf = None + if _pycaosdbconf is None: + _reset_config() + read_config = _pycaosdbconf.read(inifile) + validate_yaml_schema(config_to_yaml(_pycaosdbconf)) + + if "HTTPS_PROXY" in environ: + _pycaosdbconf["Connection"]["https_proxy"] = environ["HTTPS_PROXY"] + if "HTTP_PROXY" in environ: + _pycaosdbconf["Connection"]["http_proxy"] = environ["HTTP_PROXY"] + return read_config + + +def get_config(): + global _pycaosdbconf + return _pycaosdbconf + + +def config_to_yaml(config): + valobj = {} + for s in config.sections(): + valobj[s] = {} + for key, value in config[s].items(): + # TODO: Can the type be inferred from the config object? 
+ if key in ["timeout", "debug"]: + valobj[s][key] = int(value) + elif key in ["ssl_insecure"]: + valobj[s][key] = bool(value) + else: + valobj[s][key] = value + + return valobj + + +def validate_yaml_schema(valobj): + if optional_jsonschema_validate: + with open(os.path.join(os.path.dirname(__file__), "schema-pycaosdb-ini.yml")) as f: + schema = yaml.load(f, Loader=yaml.SafeLoader) + optional_jsonschema_validate(instance=valobj, schema=schema["schema-pycaosdb-ini"]) + else: + warnings.warn(""" + Warning: The validation could not be performed because `jsonschema` is not installed. + """) + + +def _read_config_files(): + """Read config files from different paths. + + Read the config from either ``$PYCAOSDBINI`` or home directory (``~/.pylinkahead.ini``), and + additionally adds config from a config file in the current working directory + (``pylinkahead.ini``). + If deprecated names are used (starting with 'pycaosdb'), those used in addition but the files + with the new naming scheme take precedence. + All of these files are optional. + + Returns + ------- + + ini files: list + The successfully parsed ini-files. Order: environment variable or home directory, then cwd. + Used for testing the function. + + """ + return_var = [] + ini_user = expanduser('~/.pylinkahead.ini') + ini_cwd = join(getcwd(), "pylinkahead.ini") + # LinkAhead rename block ################################################## + ini_user_caosdb = expanduser('~/.pycaosdb.ini') + ini_cwd_caosdb = join(getcwd(), "pycaosdb.ini") + if os.path.exists(ini_user_caosdb): + warnings.warn("\n\nYou have a config file with the old naming scheme (pycaosdb.ini). " + f"Please use the new version and rename\n" + f" {ini_user_caosdb}\nto\n {ini_user}", DeprecationWarning) + if os.path.exists(ini_cwd_caosdb): + warnings.warn("\n\nYou have a config file with the old naming scheme (pycaosdb.ini). 
" + f"Please use the new version and rename\n" + f" {ini_cwd_caosdb}\nto\n {ini_cwd}", DeprecationWarning) + # End: LinkAhead rename block ################################################## + + if "PYCAOSDBINI" in environ: + return_var.extend(configure(expanduser(environ["PYCAOSDBINI"]))) + else: + if isfile(ini_user_caosdb): + return_var.extend(configure(ini_user_caosdb)) + if isfile(ini_user): + return_var.extend(configure(ini_user)) + if isfile(ini_cwd): + return_var.extend(configure(ini_cwd)) + if isfile(ini_cwd_caosdb): + return_var.extend(configure(ini_cwd_caosdb)) + return return_var diff --git a/unittests/test_configs/pycaosdb-empty.ini b/src/linkahead/connection/__init__.py similarity index 100% rename from unittests/test_configs/pycaosdb-empty.ini rename to src/linkahead/connection/__init__.py diff --git a/src/linkahead/connection/authentication/__init__.py b/src/linkahead/connection/authentication/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/linkahead/connection/authentication/auth_token.py b/src/linkahead/connection/authentication/auth_token.py new file mode 100644 index 0000000000000000000000000000000000000000..bf2b76345a62f06de2bdbaf25b6b1e7e17dc8fc5 --- /dev/null +++ b/src/linkahead/connection/authentication/auth_token.py @@ -0,0 +1,96 @@ +#! -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> +# Copyright (C) 2020 Timm Fitschen <f.fitschen@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# +"""auth_token. + +An Authentictor which only uses only a pre-supplied authentication token. +""" +from __future__ import absolute_import, unicode_literals, print_function +from .interface import AbstractAuthenticator, CaosDBServerConnection +from ..utils import auth_token_to_cookie +from ...exceptions import LoginFailedError + + +def get_authentication_provider(): + """get_authentication_provider. + + Return an authenticator which only uses a pre-supplied authentication + token. + + Returns + ------- + AuthTokenAuthenticator + """ + return AuthTokenAuthenticator() + + +class AuthTokenAuthenticator(AbstractAuthenticator): + """AuthTokenAuthenticator. + + Subclass of AbstractAuthenticator which provides authentication only via + a given authentication token. + + Methods + ------- + login + logout + configure + """ + + def __init__(self): + super(AuthTokenAuthenticator, self).__init__() + self.auth_token = None + self._connection = None + + def login(self): + self._login() + + def _login(self): + raise LoginFailedError("The authentication token is expired or you " + "have been logged out otherwise. The " + "auth_token authenticator cannot log in " + "again. 
You must provide a new " + "authentication token.") + + def logout(self): + self._logout() + + def _logout(self): + self.logger.debug("[LOGOUT]") + if self.auth_token is not None: + headers = {'Cookie': auth_token_to_cookie(self.auth_token)} + self._connection.request(method="DELETE", path="logout", + headers=headers) + self.auth_token = None + + def configure(self, **config): + if "auth_token" in config: + self.auth_token = config["auth_token"] + if "connection" in config: + self._connection = config["connection"] + if not isinstance(self._connection, CaosDBServerConnection): + raise Exception("""Bad configuration of the LinkAhead connection. + The `connection` must be an instance of + `LinkAheadConnection`.""") diff --git a/src/linkahead/connection/authentication/external_credentials_provider.py b/src/linkahead/connection/authentication/external_credentials_provider.py new file mode 100644 index 0000000000000000000000000000000000000000..3d1b8afa17f58a87f09afba90c4bc7ae6dcba693 --- /dev/null +++ b/src/linkahead/connection/authentication/external_credentials_provider.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. 
+# +# ** end header +# +"""external_credentials_provider.""" +from __future__ import absolute_import, unicode_literals +from abc import ABCMeta +import logging +from .plain import PlainTextCredentialsProvider + +# meta class compatible with Python 2 *and* 3: +ABC = ABCMeta(str('ABC'), (object, ), {str('__slots__'): ()}) + + +class ExternalCredentialsProvider(PlainTextCredentialsProvider, ABC): + """ExternalCredentialsProvider. + + Abstract subclass of PlainTextCredentialsProvider which should be used to + implement external credentials provider (e.g. pass, keyring, or any other call + to an external program, which presents the plain text password, which is to be + used for the authentication. + + Parameters + ---------- + callback: Function + A function which has **kwargs argument. This funktion will be called + each time a password is needed with the current connection + configuration as parameters. + """ + + def __init__(self, callback): + super(ExternalCredentialsProvider, self).__init__() + self._callback = callback + self._config = None + + def configure(self, **config): + """configure. + + Parameters + ---------- + **config + Keyword arguments containing the necessary arguments for the + concrete implementation of this class. + + Attributes + ---------- + password : str + The password. This password is not stored in this class. A callback + is called to provide the password each time this property is + called. + + Returns + ------- + None + """ + if "password" in config: + if "password_method" in config: + authm = "`{}`".format(config["password_method"]) + else: + authm = "an external credentials provider" + self.logger.log(logging.WARNING, + ("`password` defined. You configured LinkAhead to " + "use %s as authentication method and yet " + "provided a password yourself. This indicates " + "a misconfiguration (e.g. 
in your " + "pylinkahead.ini) and should be avoided."), + authm) + self._config = dict(config) + super(ExternalCredentialsProvider, self).configure(**config) + + @property + def password(self): + return self._callback(**self._config) diff --git a/src/linkahead/connection/authentication/input.py b/src/linkahead/connection/authentication/input.py new file mode 100644 index 0000000000000000000000000000000000000000..2799207354b3949063461229d7d465e8a83c83ae --- /dev/null +++ b/src/linkahead/connection/authentication/input.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# +"""input. + +A CredentialsProvider which reads the password from the input line. +""" +from __future__ import absolute_import, unicode_literals, print_function +from .interface import CredentialsProvider, CredentialsAuthenticator + +import getpass + + +def get_authentication_provider(): + """get_authentication_provider. + + Return an authenticator which uses the input for username/password credentials. + + Returns + ------- + CredentialsAuthenticator + with an InputCredentialsProvider as back-end. 
+ """ + return CredentialsAuthenticator(InputCredentialsProvider()) + + +class InputCredentialsProvider(CredentialsProvider): + """InputCredentialsProvider. + + A class for obtaining the password directly from the user. + + Methods + ------- + configure + + Attributes + ---------- + password + username + """ + + def __init__(self): + super(InputCredentialsProvider, self).__init__() + self._password = None + self._username = None + + def configure(self, **config): + """configure. + + Parameters + ---------- + **config + Keyword arguments containing at least keywords "username" and "password". + + Returns + ------- + None + """ + if config.get("username"): + self._username = config["username"] + else: + self._username = input("Please enter the user name: ") + + url = config["url"] + self._password = getpass.getpass( + "Please enter the password for `{}` at `{}`: ".format( + self._username, url)) + + @property + def password(self): + return self._password + + @property + def username(self): + return self._username diff --git a/src/linkahead/connection/authentication/interface.py b/src/linkahead/connection/authentication/interface.py new file mode 100644 index 0000000000000000000000000000000000000000..6de43b81f441ab60401c1c01885eaa514790d3de --- /dev/null +++ b/src/linkahead/connection/authentication/interface.py @@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ** end header
+#
+"""This module provides the interfaces for authenticating requests to the
+LinkAhead server.
+
+Implementing modules must provide a `get_authentication_provider()` method.
+"""
+from abc import ABCMeta, abstractmethod, abstractproperty
+import logging
+from ..utils import urlencode
+from ..interface import CaosDBServerConnection
+from ..utils import parse_auth_token, auth_token_to_cookie
+from ...exceptions import LoginFailedError
+
+# meta class compatible with Python 2 *and* 3:
+ABC = ABCMeta('ABC', (object, ), {'__slots__': ()})
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class AbstractAuthenticator(ABC):
+    """AbstractAuthenticator.
+
+    Interface for different authentication mechanisms, e.g. username/password
+    authentication or SSH key authentication.
+
+    Attributes
+    ----------
+    logger : Logger
+        A logger which should be used for all logging which has to do with
+        authentication.
+    auth_token : str
+        A string representation of a LinkAhead Auth Token.
+
+    Methods
+    -------
+    login (abstract)
+    logout (abstract)
+    configure (abstract)
+    on_request
+    on_response
+
+    """
+
+    def __init__(self):
+        self.auth_token = None
+        self.logger = _LOGGER
+
+    @abstractmethod
+    def login(self):
+        """login.
+
+        To be implemented by the child classes.
+
+        Returns
+        -------
+        None
+        """
+        pass
+
+    @abstractmethod
+    def logout(self):
+        """logout.
+
+        To be implemented by the child classes.
+
+        Returns
+        -------
+        None
+        """
+        pass
+
+    @abstractmethod
+    def configure(self, **config):
+        """configure.
+
+        Configure this authenticator.
+
+        Parameters
+        ----------
+        **config
+            Keyword arguments for the configuration.
+
+        Returns
+        -------
+        None
+        """
+        pass
+
+    def on_response(self, response):
+        """on_response.
+
+        A call-back which is to be called by the connection after each
+        response. This method reads the latest auth cookie from the response.
+
+        Parameters
+        ----------
+        response : CaosDBHTTPResponse
+            The response of the server
+
+        Returns
+        -------
+        """
+        self.auth_token = parse_auth_token(
+            response.getheader("Set-Cookie"))
+
+    def on_request(self, method, path, headers, **kwargs):
+        # pylint: disable=unused-argument
+        """on_request.
+
+        A call-back which is to be called by the connection before each
+        request. This method sets the auth cookie for that request.
+
+        Parameters
+        ----------
+        method : str
+            The request method.
+        path : str
+            The request path.
+        headers : dict
+            A dictionary with headers which are to be set.
+        **kwargs
+            Ignored
+
+        Returns
+        -------
+        """
+        if self.auth_token is None:
+            self.login()
+        if self.auth_token is not None:
+            headers['Cookie'] = auth_token_to_cookie(self.auth_token)
+
+
+class CredentialsAuthenticator(AbstractAuthenticator):
+    """CredentialsAuthenticator.
+
+    Subclass of AbstractAuthenticator which provides authentication via
+    credentials (username/password). This class always needs a
+    credentials_provider which provides valid credentials before each
+    login.
+
+    Parameters
+    ----------
+    credentials_provider : CredentialsProvider
+        The source for the username and the password.
+ + Methods + ------- + login + logout + configure + """ + + def __init__(self, credentials_provider): + super(CredentialsAuthenticator, self).__init__() + self._credentials_provider = credentials_provider + self._connection = None + self.auth_token = None + + def login(self): + self._login() + + def logout(self): + self._logout() + + def _logout(self): + self.logger.debug("[LOGOUT]") + if self.auth_token is not None: + self._connection.request(method="DELETE", path="logout") + self.auth_token = None + + def _login(self): + username = self._credentials_provider.username + password = self._credentials_provider.password + self.logger.debug("[LOGIN] %s", username) + + # we need a username for this: + if username is None: + raise LoginFailedError("No username was given.") + if password is None: + raise LoginFailedError("No password was given") + + headers = {} + headers["Content-Type"] = "application/x-www-form-urlencoded" + body = urlencode({"username": username, "password": password}) + response = self._connection.request(method="POST", + path="login", + headers=headers, body=body) + + response.read() # clear socket + if response.status != 200: + raise LoginFailedError("LOGIN WAS NOT SUCCESSFUL") + self.on_response(response) + return response + + def configure(self, **config): + self._credentials_provider.configure(**config) + if "connection" in config: + self._connection = config["connection"] + if not isinstance(self._connection, CaosDBServerConnection): + raise Exception("""Bad configuration of the LinkAhead connection. + The `connection` must be an instance of + `LinkAheadConnection`.""") + + +class CredentialsProvider(ABC): + """CredentialsProvider. + + An abstract class for username/password authentication. + + Attributes + ---------- + password (abstract) + username (abstract) + logger : Logger + A logger which should be used for all logging which has to do with the + provision of credentials. This is usually just the "authentication" + logger. 
+ + Methods + ------- + configure (abstract) + """ + + def __init__(self): + self.logger = _LOGGER + + @abstractmethod + def configure(self, **config): + """configure. + + Configure the credentials provider with a dict. + + Parameters + ---------- + **config + Keyword arguments. The relevant arguments depend on the + implementing subclass of this class. + Returns + ------- + None + """ + + @abstractproperty + def password(self): + """password.""" + + @abstractproperty + def username(self): + """username.""" diff --git a/src/linkahead/connection/authentication/keyring.py b/src/linkahead/connection/authentication/keyring.py new file mode 100644 index 0000000000000000000000000000000000000000..202520bbab7e940ccce6517e640eff5904039553 --- /dev/null +++ b/src/linkahead/connection/authentication/keyring.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# +"""keyring. + +A CredentialsProvider which uses the external 'keyring' library to +retrieve the password. 
+""" + +import sys +import importlib +from getpass import getpass +from ...exceptions import ConfigurationError +from .external_credentials_provider import ExternalCredentialsProvider +from .interface import CredentialsAuthenticator + + +def get_authentication_provider(): + """get_authentication_provider. + + Return an authenticator which uses plain text username/password credentials. + The difference to the `plain` module is that this implementation retrieves + the password from the external gnome keyring. + + Returns + ------- + CredentialsAuthenticator + with a 'KeyringCaller' as back-end. + """ + return CredentialsAuthenticator(KeyringCaller(callback=_call_keyring)) + + +def _get_external_keyring(): + try: + return importlib.import_module("keyring") + except ImportError: + raise RuntimeError( + "The keyring password method requires installation of the" + "keyring python package. On linux with python < 3.5, " + "this requires the installation of dbus-python as a " + "system package.") + + +def _call_keyring(**config): + if "username" not in config: + raise ConfigurationError("Your configuration did not provide a " + "`username` which is needed by the " + "`KeyringCaller` to retrieve the " + "password in question.") + url = config.get("url") + username = config.get("username") + app = "linkahead — {}".format(url) + external_keyring = _get_external_keyring() + password = external_keyring.get_password(app, username) + if password is None: + print("No password for user {} on {} found in keyring." + .format(username, app)) + password = getpass("Enter password to save " + "in system keyring/wallet: ") + external_keyring.set_password(app, username, password) + return password + + +class KeyringCaller(ExternalCredentialsProvider): + """KeyringCaller. + + A class for retrieving the password from the external 'gnome keyring' and + storing the username/password credentials as plain text strings. 
+ + Methods + ------- + configure + + Attributes + ---------- + password + username + """ diff --git a/src/linkahead/connection/authentication/pass.py b/src/linkahead/connection/authentication/pass.py new file mode 100644 index 0000000000000000000000000000000000000000..bec307401f945a6cd2e223195e0cce2396602061 --- /dev/null +++ b/src/linkahead/connection/authentication/pass.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# +"""pass. + +A CredentialsProvider which calls the external program 'pass' for the +password. +""" + +from subprocess import check_output, CalledProcessError +from ...exceptions import ConfigurationError +from .interface import CredentialsAuthenticator +from .external_credentials_provider import ExternalCredentialsProvider + + +def get_authentication_provider(): + """get_authentication_provider. + + Return an authenticator which uses plain text username/password credentials. + The difference to the `plain` module is that this implementation retrieves + the password from the external program 'pass'. 
+ + Returns + ------- + CredentialsAuthenticator + with a 'PassCaller' as back-end. + """ + return CredentialsAuthenticator(PassCaller(callback=_call_pass)) + + +def _call_pass(**config): + if "password_identifier" not in config: + raise ConfigurationError("Your configuration did not provide a " + "`password_identifier` which is needed " + "by the `PassCaller` to retrieve the " + "password in question.") + + try: + return check_output( + "pass " + config["password_identifier"], + shell=True).splitlines()[0].decode("UTF-8") + except CalledProcessError as exc: + raise RuntimeError( + "Password manager returned error code {}. This usually " + "occurs if the password_identifier in .pylinkahead.ini is " + "incorrect or missing.".format(exc.returncode)) + + +class PassCaller(ExternalCredentialsProvider): + """PassCaller. + + A class for retrieving the password from the external program 'pass' and + storing the username/password credentials as plain text strings. + + Methods + ------- + configure + + Attributes + ---------- + password + username + """ + # all the work is done in _call_pass and the super class + pass diff --git a/src/linkahead/connection/authentication/plain.py b/src/linkahead/connection/authentication/plain.py new file mode 100644 index 0000000000000000000000000000000000000000..162cd365b76349c4baf1831900236c832c593547 --- /dev/null +++ b/src/linkahead/connection/authentication/plain.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# +"""plain. + +A CredentialsProvider which reads the password from the configuration +dict. +""" +from __future__ import absolute_import, unicode_literals, print_function +from .interface import CredentialsProvider, CredentialsAuthenticator + + +def get_authentication_provider(): + """get_authentication_provider. + + Return an authenticator which uses plain text username/password credentials. + + Returns + ------- + CredentialsAuthenticator + with a PlainTextCredentialsProvider as back-end. + """ + return CredentialsAuthenticator(PlainTextCredentialsProvider()) + + +class PlainTextCredentialsProvider(CredentialsProvider): + """PlainTextCredentialsProvider. + + A class for storing username/password credentials as plain text strings. + + Methods + ------- + configure + + Attributes + ---------- + password + username + """ + + def __init__(self): + super(PlainTextCredentialsProvider, self).__init__() + self._password = None + self._username = None + + def configure(self, **config): + """configure. + + Parameters + ---------- + **config + Keyword arguments containing at least keywords "username" and "password". 
+ + Returns + ------- + None + """ + if "password" in config: + self._password = config["password"] + if "username" in config: + self._username = config["username"] + + @property + def password(self): + return self._password + + @property + def username(self): + return self._username diff --git a/src/linkahead/connection/authentication/unauthenticated.py b/src/linkahead/connection/authentication/unauthenticated.py new file mode 100644 index 0000000000000000000000000000000000000000..590b7914199eb47bd2dbe064106ad6215255ad4f --- /dev/null +++ b/src/linkahead/connection/authentication/unauthenticated.py @@ -0,0 +1,120 @@ +#! -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> +# Copyright (C) 2020 Timm Fitschen <f.fitschen@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# +"""unauthenticated. + +An Authenticator which suppresses any authentication and also ignores auth_token +cookies. 
+""" +from __future__ import absolute_import, print_function, unicode_literals + +from ...exceptions import LoginFailedError +from .interface import AbstractAuthenticator, CaosDBServerConnection + + +def get_authentication_provider(): + """get_authentication_provider. + + Return an authenticator which only uses a pre-supplied authentication + token. + + Returns + ------- + AuthTokenAuthenticator + """ + return Unauthenticated() + + +class Unauthenticated(AbstractAuthenticator): + """Unauthenticated. + + Subclass of AbstractAuthenticator which suppresses any authentication and + also ignores auth_token cookies. + + Methods + ------- + login + logout + configure + on_request + on_response + """ + + def __init__(self): + super(Unauthenticated, self).__init__() + self.auth_token = None + self._connection = None + + def login(self): + self._login() + + def _login(self): + raise LoginFailedError("This LinkAhead client is configured to stay " + "unauthenticated. Change your " + "`password_method` and provide an " + "`auth_token` or credentials if you want " + "to authenticate this client.") + + def logout(self): + self._logout() + + def _logout(self): + self.auth_token = None + + def configure(self, **config): + self.auth_token = None + + def on_request(self, method, path, headers, **kwargs): + # pylint: disable=unused-argument + """on_request. + + This implementation does not attempt to login or authenticate in any + form. + + Parameters + ---------- + method + unused + path + unused + headers + unused + **kwargs + unused + """ + pass + + def on_response(self, response): + # pylint: disable=unused-argument + """on_response. + + This implementation ignores any auth_token cookie sent by the server. 
+ + Parameters + ---------- + response + unused + """ + pass diff --git a/src/linkahead/connection/connection.py b/src/linkahead/connection/connection.py new file mode 100644 index 0000000000000000000000000000000000000000..db6b66f17dd3eb8c4415119912d5586c6543b953 --- /dev/null +++ b/src/linkahead/connection/connection.py @@ -0,0 +1,712 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# Copyright (c) 2019 Daniel Hornung +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. 
+# +# ** end header +# +"""Connection to a LinkAhead server.""" +from __future__ import absolute_import, print_function, unicode_literals + +import logging +import ssl +import sys +import warnings +from builtins import str # pylint: disable=redefined-builtin +from errno import EPIPE as BrokenPipe +from socket import error as SocketError +from urllib.parse import quote, urlparse +from warnings import warn + +from requests import Session as HTTPSession +from requests.adapters import HTTPAdapter +from requests.exceptions import ConnectionError as HTTPConnectionError +from urllib3.poolmanager import PoolManager + +from ..configuration import get_config +from ..exceptions import (ConfigurationError, HTTPClientError, + HTTPForbiddenError, HTTPResourceNotFoundError, + HTTPServerError, HTTPURITooLongError, + LinkAheadConnectionError, LinkAheadException, + LoginFailedError) + +try: + from ..version import version +except ModuleNotFoundError: + version = "uninstalled" + +from pkg_resources import resource_filename + +from .encode import MultipartYielder, ReadableMultiparts +from .interface import CaosDBHTTPResponse, CaosDBServerConnection +from .utils import make_uri_path, parse_url, urlencode + +_LOGGER = logging.getLogger(__name__) + + +class _WrappedHTTPResponse(CaosDBHTTPResponse): + + def __init__(self, response): + self.response = response + self._generator = None + self._buffer = b'' + self._stream_consumed = False + + @property + def reason(self): + return self.response.reason + + @property + def status(self): + return self.response.status_code + + def read(self, size=None): + if self._stream_consumed is True: + raise RuntimeError("Stream is consumed") + + if self._buffer is None: + # the buffer has been drained in the previous call. 
+            self._stream_consumed = True
+            return b''
+
+        if self._generator is None and (size is None or size == 0):
+            # return full content at once
+            self._stream_consumed = True
+            return self.response.content
+
+        if len(self._buffer) >= size:
+            # still enough buffered bytes: serve the request from self._buffer
+            result = self._buffer[:size]
+            self._buffer = self._buffer[size:]
+            return result
+
+        if self._generator is None:
+            # first call to this method
+            if size is None or size == 0:
+                size = 512
+            self._generator = self.response.iter_content(size)
+
+        try:
+            # read new data into the buffer
+            chunk = self._buffer + next(self._generator)
+            result = chunk[:size]
+            if len(result) == 0:
+                self._stream_consumed = True
+            self._buffer = chunk[size:]
+            return result
+        except StopIteration:
+            # drain buffer
+            result = self._buffer
+            self._buffer = None
+            return result
+
+    def getheader(self, name, default=None):
+        return self.response.headers[name] if name in self.response.headers else default
+
+    def getheaders(self):
+        return self.response.headers.items()
+
+    def close(self):
+        self.response.close()
+
+
+class _SSLAdapter(HTTPAdapter):
+    """Transport adapter that allows us to use different SSL versions."""
+
+    def __init__(self, ssl_version):
+        self.ssl_version = ssl_version
+        super().__init__()
+
+    def init_poolmanager(self, connections, maxsize, block=False):
+        self.poolmanager = PoolManager(
+            num_pools=connections, maxsize=maxsize,
+            block=block, ssl_version=self.ssl_version)
+
+
+class _DefaultCaosDBServerConnection(CaosDBServerConnection):
+    """_DefaultCaosDBServerConnection.
+
+    Methods
+    -------
+    configure
+    request
+    """
+
+    def __init__(self):
+        self._useragent = ("linkahead-pylib/{version} - {implementation}".format(
+            version=version, implementation=type(self).__name__))
+        self._base_path = None
+        self._session = None
+        self._timeout = None
+
+    def request(self, method, path, headers=None, body=None):
+        """request.
+
+        Send a HTTP request to the server.
+ + Parameters + ---------- + method : str + The HTTP request method. + path : str + An URI path segment (without the 'scheme://host:port/' parts), + including query and frament segments. + headers : dict of str -> str, optional + HTTP request headers. (Defautl: None) + body : str or bytes or readable, optional + The body of the HTTP request. Bytes should be a utf-8 encoded + string. + + Returns + ------- + response : CaosDBHTTPResponse + """ + + if headers is None: + headers = {} + headers["User-Agent"] = self._useragent + + if path.endswith("/."): + path = path[:-1] + "%2E" + + if isinstance(body, MultipartYielder): + body = ReadableMultiparts(body) + + try: + response = self._session.request( + method=method, + url=self._base_path + path, + headers=headers, + data=body, + timeout=self._timeout, + stream=True) + + return _WrappedHTTPResponse(response) + except HTTPConnectionError as conn_err: + raise LinkAheadConnectionError( + "Connection failed. Network or server down? " + str(conn_err) + ) + + def configure(self, **config): + """configure. + + Configure the http connection. + + Parameters + ---------- + cacert : str + Path to the CA certificate which will be used to identify the + server. + url : str + The url of the LinkAhead Server, e.g. + `https://example.com:443/rootpath`, including a possible root path. + **config : + Any further keyword arguments are being ignored. + + Raises + ------ + LinkAheadConnectionError + If no url has been specified, or if the CA certificate cannot be + loaded. + """ + + if "url" not in config: + raise LinkAheadConnectionError( + "No connection url specified. Please " + "do so via linkahead.configure_connection(...) or in a config " + "file.") + if (not config["url"].lower().startswith("https://") and not config["url"].lower().startswith("http://")): + raise LinkAheadConnectionError("The connection url is expected " + "to be a http or https url and " + "must include the url scheme " + "(i.e. 
start with https:// or " + "http://).") + + url = urlparse(config["url"]) + path = url.path.strip("/") + if len(path) > 0: + path = path + "/" + self._base_path = url.scheme + "://" + url.netloc + "/" + path + + self._session = HTTPSession() + + if url.scheme == "https": + self._setup_ssl(config) + + # TODO(tf) remove in next release + socket_proxy = config["socket_proxy"] if "socket_proxy" in config else None + if socket_proxy is not None: + self._session.proxies = { + "https": "socks5://" + socket_proxy, + "http": "socks5://" + socket_proxy, + } + + if "https_proxy" in config: + if self._session.proxies is None: + self._session.proxies = {} + self._session.proxies["https"] = config["https_proxy"] + + if "http_proxy" in config: + if self._session.proxies is None: + self._session.proxies = {} + self._session.proxies["http"] = config["http_proxy"] + + if "timeout" in config: + self._timeout = config["timeout"] + + def _setup_ssl(self, config): + if "ssl_version" in config and config["cacert"] is not None: + ssl_version = getattr(ssl, config["ssl_version"]) + else: + ssl_version = ssl.PROTOCOL_TLS + + self._session.mount(self._base_path, _SSLAdapter(ssl_version)) + + verify = True + if "cacert" in config: + verify = config["cacert"] + if "ssl_insecure" in config and config["ssl_insecure"]: + _LOGGER.warning("*** Warning! ***\n" + "Insecure SSL mode, certificate will not be checked! " + "Please consider removing the `ssl_insecure` configuration option.\n" + "****************") + warnings.filterwarnings(action="ignore", module="urllib3", + message="Unverified HTTPS request is being made") + verify = False + if verify is not None: + self._session.verify = verify + + +def _make_conf(*conf): + """_make_conf. + + Merge several config dicts into one. The precedence goes to latter dicts in + the function call. + + Parameters + ---------- + *conf : dict + One ore more dicts with lower case option names (i.e. keys). + + Returns + ------- + dict + A merged config dict. 
+ """ + result = {} + + for conf_dict in conf: + result.update(conf_dict) + + return result + + +_DEFAULT_CONF = { + "password_method": "input", + "implementation": _DefaultCaosDBServerConnection, + "timeout": 210, +} + + +def _get_authenticator(**config): + """_get_authenticator. + + Import and configure the password_method. + + Parameters + ---------- + password_method : str + The simple name of a submodule of linkahead.connection.authentication. + Currently, there are four valid values for this parameter: 'plain', + 'pass', 'keyring' and 'auth_token'. + **config : + Any other keyword arguments are passed the configre method of the + password_method. + + Returns + ------- + AbstractAuthenticator + An object which implements the password_method and which already + configured. + + Raises + ------ + ConfigurationError + If the password_method string cannot be resolved to a LinkaheadAuthenticator + class. + """ + auth_module = ("linkahead.connection.authentication." + + config["password_method"]) + _LOGGER.debug("import auth_module %s", auth_module) + try: + __import__(auth_module) + + auth_provider = sys.modules[auth_module].get_authentication_provider() + auth_provider.configure(**config) + + return auth_provider + + except ImportError: + raise ConfigurationError("Password method \"{}\" not implemented. " + "Try `plain`, `pass`, `keyring`, or " + "`auth_token`." + .format(config["password_method"])) + + +def configure_connection(**kwargs): + """Configures the LinkAhead connection and returns the Connection object. + + The effective configuration is governed by the default values (see + 'Parameters'), the global configuration (see `linkahead.get_config()`) and the + parameters which are passed to this function, with ascending priority. + + The parameters which are listed here, are possibly not sufficient for a + working configuration of the connection. Check the `configure` method of + the implementation class and the password_method for more details. 
+ + Parameters + ---------- + url : str + The url of the LinkAhead Server. HTTP and HTTPS urls are allowed. However, + it is **highly** recommend to avoid HTTP because passwords and + authentication token are send over the network in plain text. + + username : str + Username for login; e.g. 'admin'. + + password : str + Password for login if 'plain' is used as password_method. + + password_method : str + The name of a submodule of linkahead.connection.authentication which + implements the AbstractAuthenticator interface. (Default: 'plain') + Possible values are, for example: + - "plain" Need username and password arguments. + - "input" Asks for the password. + - "pass" Uses the `pass` password manager. + - "keyring" Uses the `keyring` library. + - "auth_token" Uses only a given auth_token. + + timeout : int + A connection timeout in seconds. (Default: 210) + + ssl_insecure : bool + Whether SSL certificate warnings should be ignored. Only use this for + development purposes! (Default: False) + + auth_token : str (optional) + An authentication token which has been issued by the LinkAhead Server. + Implies `password_method="auth_token"` if set. An example token string would be `["O","OneTimeAuthenticationToken","anonymous",["administration"],[],1592995200000,604800000,"3ZZ4WKRB-5I7DG2Q6-ZZE6T64P-VQ","197d0d081615c52dc18fb323c300d7be077beaad4020773bb58920b55023fa6ee49355e35754a4277b9ac525c882bcd3a22e7227ba36dfcbbdbf8f15f19d1ee9",1,30000]`. + + https_proxy : str, optional + Define a proxy for the https connections, e.g. `http://localhost:8888`, + `socks5://localhost:8888`, or `socks4://localhost:8888`. These are + either (non-TLS) HTTP proxies, SOCKS4 proxies, or SOCKS5 proxies. HTTPS + proxies are not supported. However, the connection will be secured + using TLS in the tunneled connection nonetheless. Only the connection + to the proxy is insecure which is why it is not recommended to use HTTP + proxies when authentication against the proxy is necessary. 
If + unspecified, the https_proxy option of the pylinkahead.ini or the HTTPS_PROXY + environment variable are being used. Use `None` to override these + options with a no-proxy setting. + + http_proxy : str, optional + Define a proxy for the http connections, e.g. `http://localhost:8888`. + If unspecified, the http_proxy option of the pylinkahead.ini or the + HTTP_PROXY environment variable are being used. Use `None` to override + these options with a no-proxy setting. + + implementation : CaosDBServerConnection + The class which implements the connection. (Default: + _DefaultCaosDBServerConnection) + + Returns + ------- + _Connection + The singleton instance of the _Connection class. + """ + global_conf = {} + conf = get_config() + # Convert config to dict, with preserving types + int_opts = ["timeout"] + bool_opts = ["ssl_insecure"] + + if conf.has_section("Connection"): + global_conf = dict(conf.items("Connection")) + # Integer options + + for opt in int_opts: + if opt in global_conf: + global_conf[opt] = conf.getint("Connection", opt) + # Boolean options + + for opt in bool_opts: + if opt in global_conf: + global_conf[opt] = conf.getboolean("Connection", opt) + local_conf = _make_conf(_DEFAULT_CONF, global_conf, kwargs) + + connection = _Connection.get_instance() + + if "socket_proxy" in local_conf: + warnings.warn("Deprecated configuration option: socket_proxy. Use " + "the new https_proxy option instead", + DeprecationWarning, stacklevel=1) + connection.configure(**local_conf) + + return connection + + +def get_connection(): + """Return the connection. + + If the connection was not configured yet `configure_connection` will + be called inside this function without arguments. 
+ """ + connection = _Connection.get_instance() + + if connection.is_configured: + return connection + + return configure_connection() + + +def _handle_response_status(http_response): + + status = http_response.status + + if status == 200: + return + + # emtpy response buffer + body = http_response.read() + + if status == 404: + raise HTTPResourceNotFoundError("This resource has not been found.") + elif status > 499: + raise HTTPServerError(body=body) + + reason = http_response.reason + standard_message = ("Request failed. The response returned with status " + "{} - {}.".format(status, reason)) + if status == 401: + raise LoginFailedError(standard_message) + elif status == 403: + raise HTTPForbiddenError(standard_message) + elif status in (413, 414): + raise HTTPURITooLongError(standard_message) + elif 399 < status < 500: + raise HTTPClientError(msg=standard_message, status=status, body=body) + else: + raise LinkAheadException(standard_message) + + +class _Connection(object): # pylint: disable=useless-object-inheritance + """This connection class provides the interface to the database connection + allowing for retrieval, insertion, update, etc. of entities, files, users, + roles and much more. + + It wrapps an instance of CaosDBServerConnection which actually does the + work (how, depends on the instance). + + It is a singleton and should not be instanciated or modified by any client. + Use the methods `get_connection` and `configure_connection` for this + purpose. + """ + + __instance = None + + def __init__(self): + self._delegate_connection = None + self._authenticator = None + self.is_configured = False + + @classmethod + def get_instance(cls): + if cls.__instance is None: + cls.__instance = _Connection() + + return cls.__instance + + def configure(self, **config): + self.is_configured = True + + if "implementation" not in config: + raise ConfigurationError( + "Missing CaosDBServerConnection implementation. 
You did not " + "specify an `implementation` for the connection.") + try: + self._delegate_connection = config["implementation"]() + + if not isinstance(self._delegate_connection, + CaosDBServerConnection): + raise TypeError("The `implementation` callable did not return " + "an instance of CaosDBServerConnection.") + except TypeError as type_err: + raise ConfigurationError( + "Bad CaosDBServerConnection implementation. The " + "implementation must be a callable object which returns an " + "instance of `CaosDBServerConnection` (e.g. a constructor " + "or a factory).\n{}".format(type_err.args[0])) + self._delegate_connection.configure(**config) + + if "auth_token" in config: + # deprecated, needed for older scripts + config["password_method"] = "auth_token" + if "password_method" not in config: + raise ConfigurationError("Missing password_method. You did " + "not specify a `password_method` for" + "the connection.") + self._authenticator = _get_authenticator( + connection=self._delegate_connection, **config) + + return self + + def retrieve(self, entity_uri_segments=None, query_dict=None, **kwargs): + path = make_uri_path(entity_uri_segments, query_dict) + + http_response = self._http_request(method="GET", path=path, **kwargs) + + return http_response + + def delete(self, entity_uri_segments=None, query_dict=None, **kwargs): + path = make_uri_path(entity_uri_segments, query_dict) + + http_response = self._http_request( + method="DELETE", path=path, **kwargs) + + return http_response + + def update(self, entity_uri_segment, query_dict=None, **kwargs): + path = make_uri_path(entity_uri_segment, query_dict) + + http_response = self._http_request(method="PUT", path=path, **kwargs) + + return http_response + + def activate_user(self, link): + self._authenticator.logout() + fullurl = urlparse(link) + path = fullurl.path + query = fullurl.query + http_response = self._http_request( + method="GET", path=path + "?" 
+ query) + + return http_response + + def put_form_data(self, entity_uri_segment, params): + return self._form_data_request( + method="PUT", path=entity_uri_segment, params=params) + + def post_form_data(self, entity_uri_segment, params): + return self._form_data_request( + method="POST", + path=entity_uri_segment, + params=params) + + def _form_data_request(self, method, path, params): + body = urlencode(params) + headers = {} + headers["Content-Type"] = "application/x-www-form-urlencoded" + response = self._http_request( + method=method, + path=quote(path), + body=body, + headers=headers) + + return response + + def insert(self, entity_uri_segment, query_dict=None, body=None, **kwargs): + path = make_uri_path(entity_uri_segment, query_dict) + + http_response = self._http_request( + method="POST", path=path, body=body, **kwargs) + + return http_response + + def download_file(self, path): + """This function downloads a file via HTTP from the LinkAhead file + system.""" + try: + uri_segments = ["FileSystem"] + uri_segments.extend(path.split("/")) + + return self.retrieve(entity_uri_segments=uri_segments) + except HTTPResourceNotFoundError: + raise HTTPResourceNotFoundError("This file does not exist.") + + def _login(self): + self._authenticator.login() + + def _logout(self): + self._authenticator.logout() + + def _http_request(self, method, path, headers=None, body=None, **kwargs): + try: + return self._retry_http_request(method=method, path=path, + headers=headers, body=body, + **kwargs) + except SocketError as e: + if e.errno != BrokenPipe: + raise + + return self._retry_http_request(method=method, path=path, + headers=headers, body=body, + reconnect=False, + **kwargs) + except LoginFailedError: + if kwargs.get("reconnect", True) is True: + self._login() + + return self._retry_http_request(method=method, path=path, + headers=headers, body=body, + reconnect=False, + **kwargs) + raise + + def _retry_http_request(self, method, path, headers, body, **kwargs): + + if 
hasattr(body, "encode"): + # python3 + body = body.encode("utf-8") + + if headers is None: + headers = {} + self._authenticator.on_request(method=method, path=path, + headers=headers) + _LOGGER.debug("request: %s %s %s", method, path, str(headers)) + http_response = self._delegate_connection.request( + method=method, + path=path, + headers=headers, + body=body) + _LOGGER.debug("response: %s %s", str(http_response.status), + str(http_response.getheaders())) + self._authenticator.on_response(http_response) + _handle_response_status(http_response) + + return http_response + + def get_username(self): + """ + Return the username of the current connection. + + Shortcut for: get_connection()._authenticator._credentials_provider.username + """ + return self._authenticator._credentials_provider.username diff --git a/src/linkahead/connection/encode.py b/src/linkahead/connection/encode.py new file mode 100644 index 0000000000000000000000000000000000000000..6b328285e97e4dce2483ddd955134ee64cd3ce84 --- /dev/null +++ b/src/linkahead/connection/encode.py @@ -0,0 +1,515 @@ +# -*- encoding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. 
+#
+# ** end header
+#
+# Original work Copyright (c) 2011 Chris AtLee
+# Modified work Copyright (c) 2017 Biomedical Physics, MPI for Dynamics and Self-Organization
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+"""multipart/form-data encoding module.
+
+This module provides functions that facilitate encoding name/value pairs
+as multipart/form-data suitable for an HTTP POST or PUT request. 
+ +multipart/form-data is the standard way to upload files over HTTP +""" + +__all__ = [ + 'gen_boundary', 'encode_and_quote', 'MultipartParam', 'encode_string', + 'encode_file_header', 'get_body_size', 'get_headers', 'multipart_encode', + 'ReadableMultiparts', +] +from urllib.parse import quote_plus +from io import UnsupportedOperation +import uuid +import re +import os +import mimetypes +from email.header import Header + + +def gen_boundary(): + """Returns a random string to use as the boundary for a message.""" + return uuid.uuid4().hex + + +def encode_and_quote(data): + """If ``data`` is unicode, return urllib.quote_plus(data.encode("utf-8")) + otherwise return urllib.quote_plus(data)""" + if data is None: + return None + + return quote_plus(data) + + +class MultipartParam(object): + """Represents a single parameter in a multipart/form-data request. + + ``name`` is the name of this parameter. + + If ``value`` is set, it must be a string or unicode object to use as the + data for this parameter. + + If ``filename`` is set, it is what to say that this parameter's filename + is. Note that this does not have to be the actual filename any local file. + + If ``filetype`` is set, it is used as the Content-Type for this parameter. + If unset it defaults to "text/plain; charset=utf8" + + If ``filesize`` is set, it specifies the length of the file ``fileobj`` + + If ``fileobj`` is set, it must be a file-like object that supports + .read(). + + Both ``value`` and ``fileobj`` must not be set, doing so will + raise a ValueError assertion. + + If ``fileobj`` is set, and ``filesize`` is not specified, then + the file's size will be determined first by stat'ing ``fileobj``'s + file descriptor, and if that fails, by seeking to the end of the file, + recording the current position as the size, and then by seeking back to the + beginning of the file. 
+ + ``callback`` is a callable which will be called from iter_encode with (self, + current_transferred, total), representing the current parameter, current amount + transferred, and the total size. + """ + + def __init__(self, + name, + value=None, + filename=None, + filetype=None, + filesize=None, + fileobj=None, + callback=None): + self.name = Header(name).encode() + self.value = value + if filename is None: + self.filename = None + else: + bfilename = filename.encode("ascii", "xmlcharrefreplace") + self.filename = bfilename.decode("UTF-8").replace('"', '\\"') + + self.filetype = filetype + + self.filesize = filesize + self.fileobj = fileobj + self.callback = callback + + if self.value is not None and self.fileobj is not None: + raise ValueError("Only one of value or fileobj may be specified") + + if fileobj is not None and filesize is None: + # Try and determine the file size + try: + self.filesize = os.fstat(fileobj.fileno()).st_size + except (OSError, AttributeError, UnsupportedOperation): + try: + fileobj.seek(0, 2) + self.filesize = fileobj.tell() + fileobj.seek(0) + except BaseException: + raise ValueError("Could not determine filesize") + + def __cmp__(self, other): + attrs = [ + 'name', 'value', 'filename', 'filetype', 'filesize', 'fileobj' + ] + myattrs = [getattr(self, a) for a in attrs] + oattrs = [getattr(other, a) for a in attrs] + return cmp(myattrs, oattrs) + + def reset(self): + """Reset the file object's read pointer.""" + if self.fileobj is not None: + self.fileobj.seek(0) + elif self.value is None: + raise ValueError("Don't know how to reset this parameter") + + @classmethod + def from_file(cls, paramname, filename): + """Returns a new MultipartParam object constructed from the local file + at ``filename``. 
+ + ``filesize`` is determined by os.path.getsize(``filename``) + + ``filetype`` is determined by mimetypes.guess_type(``filename``)[0] + + ``filename`` is set to os.path.basename(``filename``) + """ + + return cls( + paramname, + filename=os.path.basename(filename), + filetype=mimetypes.guess_type(filename)[0], + filesize=os.path.getsize(filename), + fileobj=open(filename, "rb")) + + @classmethod + def from_params(cls, params): + """Returns a list of MultipartParam objects from a sequence of name, + value pairs, MultipartParam instances, or from a mapping of names to + values. + + The values may be strings or file objects, or MultipartParam + objects. MultipartParam object names must match the given names + in the name,value pairs or mapping, if applicable. + """ + if hasattr(params, 'items'): + params = params.items() + + retval = [] + for item in params: + if isinstance(item, cls): + retval.append(item) + continue + name, value = item + if isinstance(value, cls): + assert value.name == name + retval.append(value) + continue + if hasattr(value, 'read'): + # Looks like a file object + filename = getattr(value, 'name', None) + if filename is not None: + filetype = mimetypes.guess_type(filename)[0] + else: + filetype = None + + retval.append( + cls(name=name, + filename=filename, + filetype=filetype, + fileobj=value)) + else: + retval.append(cls(name, value)) + return retval + + def encode_hdr(self, boundary): + """Returns the header of the encoding of this parameter.""" + boundary = encode_and_quote(boundary) + + headers = ["--%s" % boundary] + + if self.filename: + disposition = 'form-data; name="%s"; filename="%s"' % ( + self.name, self.filename) + else: + disposition = 'form-data; name="%s"' % self.name + + headers.append("Content-Disposition: %s" % disposition) + + if self.filetype: + filetype = self.filetype + else: + filetype = "text/plain; charset=utf-8" + + headers.append("Content-Type: %s" % filetype) + + headers.append("") + headers.append("") + + return 
"\r\n".join(headers) + + def encode(self, boundary): + """Returns the string encoding of this parameter.""" + if self.value is None: + value = self.fileobj.read() + else: + value = self.value + + if re.search("^--%s$" % re.escape(boundary), value, re.M): + raise ValueError("boundary found in encoded string") + + return "%s%s\r\n" % (self.encode_hdr(boundary), value) + + def iter_encode(self, boundary, blocksize=4096): + """Yields the encoding of this parameter If self.fileobj is set, then + blocks of ``blocksize`` bytes are read and yielded.""" + total = self.get_size(boundary) + current_transferred = 0 + if self.value is not None: + block = self.encode(boundary) + current_transferred += len(block) + yield block + if self.callback: + self.callback(self, current_transferred, total) + else: + block = self.encode_hdr(boundary) + current_transferred += len(block) + yield block + if self.callback: + self.callback(self, current_transferred, total) + last_block = b"" + encoded_boundary = "--%s" % encode_and_quote(boundary) + boundary_exp = re.compile("^%s$" % re.escape(encoded_boundary), + re.M) + while True: + block = self.fileobj.read(blocksize) + if not block: + current_transferred += 2 + yield "\r\n" + if self.callback: + self.callback(self, current_transferred, total) + break + last_block += block + if boundary_exp.search(last_block.decode("ascii", "ignore")): + raise ValueError("boundary found in file data") + last_block = last_block[-len(encoded_boundary) - 2:] + current_transferred += len(block) + yield block + if self.callback: + self.callback(self, current_transferred, total) + + def get_size(self, boundary): + """Returns the size in bytes that this param will be when encoded with + the given boundary.""" + if self.filesize is not None: + valuesize = self.filesize + else: + valuesize = len(self.value) + + return len(self.encode_hdr(boundary)) + 2 + valuesize + + +def encode_string(boundary, name, value): + """Returns ``name`` and ``value`` encoded as a 
multipart/form-data + variable. + + ``boundary`` is the boundary string used throughout a single request + to separate variables. + """ + + return MultipartParam(name, value).encode(boundary) + + +def encode_file_header(boundary, + paramname, + filesize, + filename=None, + filetype=None): + """Returns the leading data for a multipart/form-data field that contains + file data. + + ``boundary`` is the boundary string used throughout a single request to + separate variables. + + ``paramname`` is the name of the variable in this request. + + ``filesize`` is the size of the file data. + + ``filename`` if specified is the filename to give to this field. This + field is only useful to the server for determining the original filename. + + ``filetype`` if specified is the MIME type of this file. + + The actual file data should be sent after this header has been sent. + """ + + return MultipartParam( + paramname, filesize=filesize, filename=filename, + filetype=filetype).encode_hdr(boundary) + + +def get_body_size(params, boundary): + """Returns the number of bytes that the multipart/form-data encoding of + ``params`` will be.""" + size = sum( + p.get_size(boundary) for p in MultipartParam.from_params(params)) + return size + len(boundary) + 6 + + +def get_headers(params, boundary): + """Returns a dictionary with Content-Type and Content-Length headers for + the multipart/form-data encoding of ``params``.""" + headers = {} + boundary = quote_plus(boundary) + headers['Content-Type'] = "multipart/form-data; boundary=%s" % boundary + headers['Content-Length'] = str(get_body_size(params, boundary)) + return headers + + +class MultipartYielder(object): + """An iterator that yields the parameters of a multipart/formdata http + body.""" + + def __init__(self, params, boundary, callback): + self.params = params + self.boundary = boundary + self.callback = callback + + self.i = 0 + self.current_part = None + self.param_iter = None + self.current_transferred = 0 + self.total = 
get_body_size(params, boundary) + + def __iter__(self): + return self + + # since python 3 + def __next__(self): + return self.next() + + def next(self): + """generator function to yield multipart/form-data representation of + parameters.""" + if self.param_iter is not None: + try: + block = next(self.param_iter) + self.current_transferred += len(block) + if self.callback: + self.callback(self.current_part, + self.current_transferred, self.total) + return block + except StopIteration: + self.current_part = None + self.param_iter = None + + if self.i is None: + raise StopIteration + elif self.i >= len(self.params): + self.param_iter = None + self.current_part = None + self.i = None + block = "--%s--\r\n" % self.boundary + self.current_transferred += len(block) + if self.callback: + self.callback(self.current_part, + self.current_transferred, self.total) + return block + + self.current_part = self.params[self.i] + self.param_iter = self.current_part.iter_encode(self.boundary) + self.i += 1 + return next(self) + + def reset(self): + """Reset the iterator.""" + self.i = 0 + self.current_transferred = 0 + for param in self.params: + param.reset() + + +def multipart_encode(params, boundary=None, callback=None): + """Encode ``params`` as multipart/form-data. + + ``params`` should be a sequence of (name, value) pairs or MultipartParam + objects, or a mapping of names to values. + Values are either strings parameter values, or file-like objects to use as + the parameter value. The file-like objects must support .read() and either + .fileno() or both .seek() and .tell(). + + If ``boundary`` is set, then it as used as the MIME boundary. Otherwise + a randomly generated boundary will be used. In either case, if the + boundary string appears in the parameter values a ValueError will be + raised. + + If ``callback`` is set, it should be a callback which will get called as blocks + of data are encoded. 
It will be called with (param, current_transferred, total), + indicating the current parameter being encoded, the current amount encoded, + and the total amount to encode. + + Returns a tuple of `datagen`, `headers`, where `datagen` is a + generator that will yield blocks of data that make up the encoded + parameters, and `headers` is a dictionary with the assoicated + Content-Type and Content-Length headers. + + Examples: + + >>> datagen, headers = multipart_encode( [("key", "value1"), ("key", "value2")] ) + >>> s = "".join(datagen) + >>> assert "value2" in s and "value1" in s + + >>> p = MultipartParam("key", "value2") + >>> datagen, headers = multipart_encode( [("key", "value1"), p] ) + >>> s = "".join(datagen) + >>> assert "value2" in s and "value1" in s + + >>> datagen, headers = multipart_encode( {"key": "value1"} ) + >>> s = "".join(datagen) + >>> assert "value2" not in s and "value1" in s + """ + if boundary is None: + boundary = gen_boundary() + else: + boundary = quote_plus(boundary) + + headers = get_headers(params, boundary) + params = MultipartParam.from_params(params) + + return MultipartYielder(params, boundary, callback), headers + + +class ReadableMultiparts(object): + """Wraps instances of the MultipartYielder class as a readable and withable + object.""" + + def __init__(self, multipart_yielder): + self.multipart_yielder = multipart_yielder + self.current_block = None + self.left_over = b'' + + def read(self, size=-1): + result = self.left_over + while size == -1 or len(result) < size: + try: + next_chunk = self.multipart_yielder.next() + if hasattr(next_chunk, "encode"): + next_chunk = next_chunk.encode("utf8") + result += next_chunk + except StopIteration: + break + + if size == -1: + self.left_over = b'' + return result + + self.left_over = result[size:] + return result[:size] + + def __enter__(self): + pass + + def __exit__(self, type, value, traceback): + self.close() + + def close(self): + self.multipart_yielder.reset() diff --git 
a/src/linkahead/connection/interface.py b/src/linkahead/connection/interface.py new file mode 100644 index 0000000000000000000000000000000000000000..d63dbeb8cc4cd59e056823440948aa54906dd47c --- /dev/null +++ b/src/linkahead/connection/interface.py @@ -0,0 +1,98 @@ +# -*- encoding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# +"""This module defines the CaosDBServerConnection interface.""" +from abc import ABCMeta, abstractmethod, abstractproperty +from warnings import warn + +# meta class compatible with Python 2 *and* 3: +ABC = ABCMeta('ABC', (object, ), {'__slots__': ()}) + + +class CaosDBHTTPResponse(ABC): + """An abstract class which defines a minimal interface for responses of the + LinkAheadServer.""" + + @abstractmethod + def read(self, size=-1): + """Read up to *size* bytes from the response body. + + If size is unspecified or -1, all bytes until EOF are returned. + """ + + @abstractmethod + def getheader(self, name, default=None): + """Return the value of the header *name* or the value of *default* if + there is no such header. 
+ + If there are multiple headers with that name, return all of the + values joined by ', '. If default is an iterable, its elements + are returned likewise. + """ + + @abstractproperty + def status(self): + """Status code of the response.""" + + @abstractmethod + def getheaders(self): + """Return all headers.""" + + def __enter__(self): + pass + + def __exit__(self, type, value, traceback): + self.close() + + @abstractmethod + def close(self): + """close. + + Close this response. Depending on the implementation this might + also close underlying streams, sockets etc. + """ + + +class CaosDBServerConnection(ABC): + """Abstract class which defines the interface for sending requests to the + LinkAhead server.""" + + @abstractmethod + def request(self, method, path, headers=None, body=None, **kwargs): + """Abstract method. Implement this method for HTTP requests to the + LinkAhead server. + + Returns + ------- + CaosDBHTTPResponse + """ + + @abstractmethod + def configure(self, **kwargs): + """Configure the connection. This method is to be called by + configure_connection. + + Returns + ------- + None + """ diff --git a/src/linkahead/connection/mockup.py b/src/linkahead/connection/mockup.py new file mode 100644 index 0000000000000000000000000000000000000000..9b69971c0409708f221c402f540fac85ff9c527e --- /dev/null +++ b/src/linkahead/connection/mockup.py @@ -0,0 +1,100 @@ +# -*- encoding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ** end header
+#
+"""Classes and functions to imitate a LinkAhead server without actually setting
+up a TCP connection."""
+
+from __future__ import unicode_literals, print_function
+from io import StringIO
+from .connection import CaosDBServerConnection, CaosDBHTTPResponse
+
+
+def _request_log_message(method, path, headers, body, **kwargs):
+    return "{method}: {path}{nl0}{kwargs}{nl1}{headers}{nl2}{body}".format(
+        method=method,
+        path=path,
+        headers=headers if headers else "",
+        body=body if body else "",
+        kwargs=kwargs if kwargs else "",
+        nl0="\n" if kwargs else "",
+        nl1="\n" if headers else "",
+        nl2="\n" if body else "")
+
+
+class MockUpResponse(CaosDBHTTPResponse):
+    """A class for the responses of a request to the MockUpServerConnection."""
+
+    def __init__(self, status, headers, body):
+        self._status = status
+        self.headers = headers
+        self.response = StringIO(body)
+
+    @property
+    def status(self):
+        """Return the status of the response."""
+        return self._status
+
+    def read(self, size=-1):
+        """Return the body of the response."""
+        return self.response.read(size).encode()
+
+    def getheader(self, name, default=None):
+        """Get the contents of the header `name`, or `default` if there is no
+        matching header."""
+        return self.headers[name] if name in self.headers else default
+
+    def getheaders(self):
+        return self.headers
+
+    def close(self):
+        pass
+
+
+class MockUpServerConnection(CaosDBServerConnection):
+    """The mock-up connection which does not actually connect to anything but
+    just returns predefined responses which mimic the LinkAhead 
server.""" + + def __init__(self): + self.resources = [self._login] + + def _login(self, method, path, headers, body): + if method == "POST" and path == "login": + return MockUpResponse(200, + headers={"AuthToken": + "mockup-auth-token"}, + body="") + + def configure(self, **kwargs): + """This configure method does nothing.""" + + def request(self, method, path, headers=None, body=None, **kwargs): + """Search a resource in the `resources` list which is answering to the + request and return the response or raise a RuntimeError.""" + for resource in self.resources: + response = resource( + method=method, path=path, headers=headers, body=body, **kwargs) + if response: + return response + raise RuntimeError( + "No response for this request - " + + _request_log_message(method, path, headers, body, **kwargs)) diff --git a/src/linkahead/connection/utils.py b/src/linkahead/connection/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..90ec6b5ba6789747f5d4452a1260306b716b1f7e --- /dev/null +++ b/src/linkahead/connection/utils.py @@ -0,0 +1,190 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. 
+#
+# ** end header
+#
+"""Utility functions for the connection module."""
+from __future__ import unicode_literals, print_function
+from builtins import str as unicode
+from urllib.parse import (urlencode as _urlencode, quote as _quote,
+                          urlparse, urlunparse, unquote as _unquote)
+import re
+
+
+def urlencode(query):
+    """Convert a dict into a url-encoded (unicode) string.
+
+    This is basically a python2/python3 compatibility wrapper for the respective
+    functions in the urllib module with some slight modifications.
+
+    1) A `None` is translated to an empty string.
+    >>> urlencode({'key': None})
+    'key='
+
+    2) Unicode strings are allowed for python2.
+    >>> urlencode({'kèy': 'välüe'})
+    'k%C3%A8y=v%C3%A4l%C3%BCe'
+
+    3) All other parameters which can be passed to the respective functions are
+    not implemented here and the default parameters will be used.
+
+.. code::
+
+    >>> urlencode({'key': ['val1', 'val2']}, doseq=True)
+    Traceback (most recent call last):
+    ...
+    TypeError: urlencode() got an unexpected keyword argument 'doseq'
+
+Otherwise, this function works exactly as its counterparts in the urllib
+modules when they are called with only the query parameter.
+
+    Parameters
+    ----------
+    query : dict
+        A dict of str or unicode keys with None, unicode or str values.
+
+    Returns
+    -------
+    str
+        A series of `key=value` pairs separated by `&`.
+
+    Raises
+    ------
+    AttributeError
+        If a key or a value does not have a 'encode' method.
+    """
+    return str(
+        _urlencode({
+            key.encode("utf-8"): (val.encode("utf-8")
+                                  if val is not None else '')
+            for key, val in query.items()
+        }))
+
+
+def make_uri_path(segments=None, query=None):
+    """Url-encode all segments, concat them with slashes and append the query.
+ + Examples + -------- + >>> make_uri_path(['a','b']) + 'a/b' + >>> make_uri_path(['a','ö']) + 'a/%C3%B6' + >>> make_uri_path(['a','b'], {'key': 'val'}) + 'a/b?key=val' + + Parameters + ---------- + segments : list of str + The segments of the path + query: dict + A dict of str keys with None or str values. + + Returns + ------- + str + A relative uri path (no host information, possibly no root path). + """ + path_no_query = ("/".join([quote(segment) for segment in segments]) + if segments else "") + return str(path_no_query if query is None else "?".join([ + path_no_query, "&".join([ + quote(key) + "=" + + (quote(query[key]) if query[key] is not None else "") + for key in query + ]) + ])) + + +def quote(string): + enc = string.encode('utf-8') + return _quote(enc).replace('/', '%2F') + + +def parse_url(url): + fullurl = urlparse(url) + # make sure the path ends with a slash + if not fullurl.path.endswith("/"): + parse_result = list(fullurl) + parse_result[2] += "/" + fullurl = urlparse(urlunparse(parse_result)) + return fullurl + + +_PATTERN = re.compile(r"^SessionToken=([^;]*);.*$") + + +def unquote(string): + """unquote. + + Decode an urlencoded string into a plain text string. + """ + bts = _unquote(string) + if hasattr(bts, "decode"): + # python 2 + return bts.decode("utf-8") + return bts + + +def parse_auth_token(cookie): + """parse_auth_token. + + Parse an auth token from a cookie. + + Parameters + ---------- + cookie : str + A cookie with an urlencoded authtoken. + + Returns + ------- + str + An auth token string. + """ + auth_token = None + if cookie is not None and _PATTERN.match(cookie): + auth_token = unquote(_PATTERN.split(cookie)[1]) + return auth_token + + +def auth_token_to_cookie(auth_token): + """auth_token_to_cookie. + + Urlencode an auth token string and format it as a cookie. + + Parameters + ---------- + auth_token : str + The plain auth token string. 
+ + Raises + ------ + TypeError + If the auth_token was None + + Returns + ------- + str + A cookie + """ + if auth_token is None: + raise TypeError("Parameter `auth_token` was None.") + return "SessionToken=" + quote(auth_token) + ";" diff --git a/src/linkahead/exceptions.py b/src/linkahead/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..a6abe09edbbece2a38bdc6c5e1296a2b3dd81bde --- /dev/null +++ b/src/linkahead/exceptions.py @@ -0,0 +1,379 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# Copyright (C) 2020 Indiscale GmbH <info@indiscale.com> +# Copyright (C) 2020 Florian Spreckelsen <f.spreckelsen@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# +"""The exceptions module defines exceptions for HTTP Errors (4xx and 5xx and +HTTP response codes) and for transaction errors (i.e. missing permissions, +dependencies, non-passing consistency checks etc.). +""" + +from warnings import warn + +from lxml import etree + + +class CaosDBException(Exception): + # TODO remove from inheritance change once deprecation period is over + def __init__(self, msg): + warn(("The name CaosDBException is deprecated. 
Please use LinkAheadException."),
+             DeprecationWarning)
+        Exception.__init__(self, msg)
+        self.msg = msg
+
+
+class LinkAheadException(CaosDBException):
+    """Base class of all LinkAhead exceptions."""
+
+    def __init__(self, msg):
+        Exception.__init__(self, msg)
+        self.msg = msg
+
+
+class ConfigurationError(LinkAheadException):
+    """ConfigurationError.
+
+    Indicates a misconfiguration.
+
+    Parameters
+    ----------
+    msg : str
+        A description of the misconfiguration. The constructor adds
+        a few lines explaining where to find the configuration.
+
+    Attributes
+    ----------
+    msg : str
+        A description of the misconfiguration.
+    """
+
+    def __init__(self, msg):
+        super().__init__(msg + ConfigurationError._INFO)
+
+    _INFO = ("\n\nPlease check your ~/.pylinkahead.ini and your $PWD/"
+             ".pylinkahead.ini. Does at least one of them exist and are they correct?")
+
+
+class ServerConfigurationException(LinkAheadException):
+    """The server is configured in a different way than expected.
+
+    This can be for example unexpected flags or settings or missing extensions.
+    """
+
+
+class HTTPClientError(LinkAheadException):
+    """HTTPClientError represents 4xx HTTP client errors."""
+
+    def __init__(self, msg, status, body):
+        self.status = status
+        self.body = body
+        LinkAheadException.__init__(self, msg)
+
+
+class HTTPServerError(LinkAheadException):
+    """HTTPServerError represents 5xx HTTP server errors."""
+
+    def __init__(self, body):
+        xml = etree.fromstring(body)
+        error = xml.xpath('/Response/Error')[0]
+        msg = error.get("description")
+
+        if error.text is not None:
+            msg = msg + "\n\n" + error.text
+        LinkAheadException.__init__(self, msg)
+
+
+class CaosDBConnectionError(LinkAheadException):
+    # TODO remove from inheritance change once deprecation period is over
+    def __init__(self, msg=None):
+        warn(("The name CaosDBConnectionError is deprecated. 
" + "Please use LinkAheadConnectionError."), + DeprecationWarning) + LinkAheadException.__init__(self, msg) + + +class LinkAheadConnectionError(CaosDBConnectionError): + """Connection is not configured or the network is down.""" + + def __init__(self, msg=None): + LinkAheadException.__init__(self, msg) + + +class HTTPURITooLongError(HTTPClientError): + """The URI of the last request was too long.""" + + def __init__(self, msg=None): + HTTPClientError.__init__(self, msg=msg, status=414, body=None) + + +class LoginFailedError(LinkAheadException): + """Login failed. + + Probably, your username/password pair is wrong. + """ + + def __init__(self, msg=None): + LinkAheadException.__init__(self, msg=msg) + + +class HTTPForbiddenError(HTTPClientError): + """You're lacking the required permissions. Corresponds to HTTP status + 403. + + """ + + def __init__(self, msg=None): + HTTPClientError.__init__(self, msg=msg, status=403, body=None) + + +class HTTPResourceNotFoundError(HTTPClientError): + """The requested resource doesn't exist; corresponds to HTTP status + 404. 
+
+    """
+
+    def __init__(self, msg=None):
+        HTTPClientError.__init__(self, msg=msg, status=404, body=None)
+
+
+class MismatchingEntitiesError(LinkAheadException):
+    """Mismatching entities were found during container sync."""
+
+
+# ######################### Bad query errors ###########################
+
+class BadQueryError(LinkAheadException):
+    """Base class for query errors that are not transaction errors."""
+
+
+class PagingConsistencyError(BadQueryError):
+    """The database state changed between two consecutive paged requests of the
+    same query."""
+
+
+class QueryNotUniqueError(BadQueryError):
+    """A unique query or retrieve found more than one entity."""
+
+
+class EmptyUniqueQueryError(BadQueryError):
+    """A unique query or retrieve found no result."""
+
+
+# ######################### Transaction errors #########################
+
+
+class TransactionError(LinkAheadException):
+    """An error of this type is raised whenever any transaction fails with
+    one or more entities between client and LinkAhead server. More
+    detailed errors are collected as direct and indirect children in
+    the 'errors' list (direct children) and the 'all_errors' set (set
+    of all direct and indirect children).
+
+    """
+
+    def __init__(self, error=None,
+                 msg="An error occured during the transaction.",
+                 container=None):
+        LinkAheadException.__init__(self, msg=msg)
+        self.errors = []
+        self.all_errors = set()
+        self.entities = []
+        self.all_entities = set()
+        self.container = container
+        # special case of faulty container
+        if container is not None and container.get_errors() is not None:
+            self.code = container.get_errors()[0].code
+        else:
+            self.code = None
+        if error is not None:
+            self.add_error(error)
+
+    def has_error(self, error_t, direct_children_only=False):
+        """Check whether this transaction error contains an error of type
+        error_t. If direct_children_only is True, only direct children
+        are checked. 
+ + Parameters + ---------- + error_t : EntityError + error type to be checked + direct_children_only: bool, optional + If True, only direct children, i.e., all errors in + self.errors are checked. Else all direct and indirect + children, i.e., all errors in self.all_errors are + used. Default is false. + + Returns + ------- + has_error : bool + True if at least one of the children is of type error_t, + False otherwise. + + """ + + test_set = self.errors if direct_children_only else self.all_errors + return any([isinstance(err, error_t) for err in test_set]) + + def add_error(self, error): + """Add an error as a direct child to this TransactionError. + + @param error: An EntityError or a list of EntityErrors. + + @raise TypeError: If and only if the 'error' parameter is not an + instance of EntityError. + + @return: self. + """ + + if hasattr(error, "__iter__"): + for err in error: + self.add_error(err) + + return self + elif isinstance(error, EntityError): + self.errors.append(error) + self.entities.append(error.entity) + + self.all_errors.add(error) + self.all_errors.update(error.all_errors) + self.all_entities.add(error.entity) + self.all_entities.update(error.all_entities) + + return self + else: + raise TypeError( + "Argument is to be an EntityError or a list of EntityErrors.") + + def _repr_reasons(self, indent): + if self.errors is not None and len(self.errors) > 0: + ret = "\n" + indent + " +--| REASONS |--" + + for err in self.errors: + ret += '\n' + indent + ' | -> ' + \ + err.__str__(indent=indent + ' |') + ret += "\n" + indent + " +----------------" + + return ret + else: + return '' + + def _repr_head(self, indent): + return indent + str(type(self).__name__) + ( + (': ' + self.msg) + if hasattr(self, 'msg') and self.msg is not None + else '' + ) + + def __str__(self, indent=''): + ret = self._repr_head(indent=indent) + ret += self._repr_reasons(indent=indent) + + return ret + + def __repr__(self): + return self.__str__() + + +class 
EntityError(TransactionError): + """This is the most basic entity error. It is constructed using an + entity that caused the error and the error message attached by the + server. + + """ + + def __init__(self, error=None, entity=None): + TransactionError.__init__(self) + self.error = error + if hasattr(error, "code"): + self.code = error.code + else: + self.code = None + self.entity = entity + + if error is not None and hasattr(error, "encode"): + self.msg = error + elif error is not None and hasattr(error, 'description'): + self.msg = error.description + elif error is None: + self.msg = None + else: + self.msg = str(error) + + @property + def description(self): + """The description of the error.""" + return self.error.description if self.error is not None else None + + def _repr_head(self, indent): + if hasattr(self, 'entity') and self.entity is not None: + return (str(type(self.entity).__name__).upper() + " (id: " + + str(self.entity.id) + ((", name: " + "'" + str(self.entity.name) + "'") if + self.entity.name is not None else '') + ") CAUSED " + + TransactionError._repr_head(self, indent)) + else: + return TransactionError._repr_head(self, indent) + + +class UniqueNamesError(EntityError): + """A name was supposed to be unique but was not.""" + + +class UnqualifiedParentsError(EntityError): + """This entity has unqualified parents (see 'errors' attribute for a + list of errors of the parent entities or 'entities' attribute for + a list of parent entities with errors). + + """ + + +class UnqualifiedPropertiesError(EntityError): + """This entity has unqualified properties (see 'errors' attribute for + a list of errors of the properties or 'entities' attribute for a + list of properties with errors). 
+
+    """
+
+
+class EntityDoesNotExistError(EntityError):
+    """This entity does not exist."""
+
+
+class EntityHasNoDatatypeError(EntityError):
+    """This entity has to have a data type."""
+
+
+class ConsistencyError(EntityError):
+    """The transaction violates database consistency."""
+
+
+class AuthorizationError(EntityError):
+    """You are not allowed to do whatever you tried to do.
+
+    Maybe you need more privileges or a user account.
+    """
+
+
+class AmbiguousEntityError(EntityError):
+    """A retrieval of the entity was not possible because there is more
+    than one possible candidate.
+    """
diff --git a/src/linkahead/high_level_api.py b/src/linkahead/high_level_api.py
new file mode 100644
index 0000000000000000000000000000000000000000..70f1be36283b706f8d38d450d937ab13a9b9e699
--- /dev/null
+++ b/src/linkahead/high_level_api.py
@@ -0,0 +1,1049 @@
+# -*- coding: utf-8 -*-
+#
+# This file is a part of the LinkAhead Project.
+#
+# Copyright (C) 2018 Research Group Biomedical Physics,
+# Max-Planck-Institute for Dynamics and Self-Organization Göttingen
+# Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com>
+# Copyright (C) 2020-2022 IndiScale GmbH <info@indiscale.com>
+# Copyright (C) 2022 Alexander Schlemmer <alexander.schlemmer@ds.mpg.de>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>. 
+# +# ** end header +# + +""" +A high level API for accessing LinkAhead entities from within python. + +This is refactored from apiutils. +""" + +import warnings +from dataclasses import dataclass, fields +from datetime import datetime +from typing import Any, Dict, List, Optional, Union + +import yaml +from dateutil import parser + +import linkahead as db +from .apiutils import create_flat_list, get_type_of_entity_with +from .common.datatype import (BOOLEAN, DATETIME, DOUBLE, FILE, INTEGER, + REFERENCE, TEXT, get_list_datatype, + is_list_datatype, is_reference) + +warnings.warn("""EXPERIMENTAL! The high_level_api module is experimental and may be changed or +removed in the future. Its purpose is to give an impression on how the Python client user interface +might be changed.""") + + +def standard_type_for_high_level_type(high_level_record: "CaosDBPythonEntity", + return_string: bool = False): + """ + For a given CaosDBPythonEntity either return the corresponding + class in the standard CaosDB API or - if return_string is True - return + the role as a string. 
+ """ + if type(high_level_record) == CaosDBPythonRecord: + if not return_string: + return db.Record + return "Record" + elif type(high_level_record) == CaosDBPythonFile: + if not return_string: + return db.File + return "File" + elif type(high_level_record) == CaosDBPythonProperty: + if not return_string: + return db.Property + return "Property" + elif type(high_level_record) == CaosDBPythonRecordType: + if not return_string: + return db.RecordType + return "RecordType" + elif type(high_level_record) == CaosDBPythonEntity: + if not return_string: + return db.Entity + return "Entity" + raise RuntimeError("Incompatible type.") + + +def high_level_type_for_role(role: str): + if role == "Record": + return CaosDBPythonRecord + if role == "File": + return CaosDBPythonFile + if role == "Property": + return CaosDBPythonProperty + if role == "RecordType": + return CaosDBPythonRecordType + if role == "Entity": + return CaosDBPythonEntity + raise RuntimeError("Unknown role.") + + +def high_level_type_for_standard_type(standard_record: db.Entity): + if not isinstance(standard_record, db.Entity): + raise ValueError() + role = standard_record.role + if role == "Record" or type(standard_record) == db.Record: + return CaosDBPythonRecord + elif role == "File" or type(standard_record) == db.File: + return CaosDBPythonFile + elif role == "Property" or type(standard_record) == db.Property: + return CaosDBPythonProperty + elif role == "RecordType" or type(standard_record) == db.RecordType: + return CaosDBPythonRecordType + elif role == "Entity" or type(standard_record) == db.Entity: + return CaosDBPythonEntity + raise RuntimeError("Incompatible type.") + + +@dataclass +class CaosDBPropertyMetaData: + # name is already the name of the attribute + unit: Optional[str] = None + datatype: Optional[str] = None + description: Optional[str] = None + id: Optional[int] = None + importance: Optional[str] = None + + +class CaosDBPythonUnresolved: + pass + + +@dataclass +class 
CaosDBPythonUnresolvedParent(CaosDBPythonUnresolved):
+    """
+    Parents can be either given by name or by ID.
+
+    When resolved, both fields should be set.
+    """
+
+    id: Optional[int] = None
+    name: Optional[str] = None
+
+
+@dataclass
+class CaosDBPythonUnresolvedReference(CaosDBPythonUnresolved):
+
+    def __init__(self, id=None):
+        self.id = id
+
+
+class CaosDBPythonEntity(object):
+
+    def __init__(self):
+        """
+        Initialize a new CaosDBPythonEntity for the high level python api.
+
+        Parents are either unresolved references or CaosDB RecordTypes.
+
+        Properties are stored directly as attributes for the object.
+        Property metadata is maintained in a dictionary _properties_metadata that should
+        never be accessed directly, but only using the get_property_metadata function.
+        If property values are references to other objects, they will be stored as
+        CaosDBPythonUnresolvedReference objects that can be resolved later into
+        CaosDBPythonRecords.
+        """
+
+        # Parents are either unresolved references or CaosDB RecordTypes
+        self._parents: List[Union[
+            CaosDBPythonUnresolvedParent, CaosDBPythonRecordType]] = []
+        # self._id: int = CaosDBPythonEntity._get_new_id()
+        self._id: Optional[int] = None
+        self._name: Optional[str] = None
+        self._description: Optional[str] = None
+        self._version: Optional[str] = None
+
+        self._file: Optional[str] = None
+        self._path: Optional[str] = None
+
+        # name: name of property, value: property metadata
+        self._properties_metadata: Dict[CaosDBPropertyMetaData] = dict()
+
+        # Store all current attributes as forbidden attributes
+        # which must not be changed by the set_property function.
+        self._forbidden = dir(self) + ["_forbidden"]
+
+    def use_parameter(self, name, value):
+        self.__setattr__(name, value)
+        return value
+
+    @property
+    def id(self):
+        """
+        Getter for the id.
+        """
+        return self._id
+
+    @id.setter
+    def id(self, val: int):
+        self._id = val
+
+    @property
+    def name(self):
+        """
+        Getter for the name. 
+ """ + return self._name + + @name.setter + def name(self, val: str): + self._name = val + + @property + def file(self): + """ + Getter for the file. + """ + if type(self) != CaosDBPythonFile: + raise RuntimeError("Please don't use the file attribute for entities" + " that are no files.") + return self._file + + @file.setter + def file(self, val: str): + if val is not None and type(self) != CaosDBPythonFile: + raise RuntimeError("Please don't use the file attribute for entities" + " that are no files.") + self._file = val + + @property + def path(self): + """ + Getter for the path. + """ + if type(self) != CaosDBPythonFile: + raise RuntimeError("Please don't use the path attribute for entities" + " that are no files.") + return self._path + + @path.setter + def path(self, val: str): + if val is not None and type(self) != CaosDBPythonFile: + raise RuntimeError("Please don't use the path attribute for entities" + " that are no files.") + self._path = val + + @property + def description(self): + """ + Getter for the description. + """ + return self._description + + @description.setter + def description(self, val: str): + self._description = val + + @property + def version(self): + """ + Getter for the version. + """ + return self._version + + @version.setter + def version(self, val: str): + self._version = val + + def _set_property_from_entity(self, ent: db.Entity, importance: str, + references: Optional[db.Container], + visited: Dict[int, "CaosDBPythonEntity"]): + """ + Set a new property using an entity from the normal python API. + + ent : db.Entity + The entity to be set. 
+ """ + + if ent.name is None: + raise RuntimeError("Setting properties without name is impossible.") + + if ent.name in self.get_properties(): + raise RuntimeError("Multiproperty not implemented yet.") + + val = self._type_converted_value(ent.value, ent.datatype, + references, visited) + self.set_property( + ent.name, + val, + datatype=ent.datatype) + metadata = self.get_property_metadata(ent.name) + + for prop_name in fields(metadata): + k = prop_name.name + if k == "importance": + metadata.importance = importance + else: + metadata.__setattr__(k, ent.__getattribute__(k)) + + def get_property_metadata(self, prop_name: str) -> CaosDBPropertyMetaData: + """ + Retrieve the property metadata for the property with name prop_name. + + If the property with the given name does not exist or is forbidden, raise an exception. + Else return the metadata associated with this property. + + If no metadata does exist yet for the given property, a new object will be created + and returned. + + prop_name: str + Name of the property to retrieve metadata for. + """ + + if not self.property_exists(prop_name): + raise RuntimeError("The property with name {} does not exist.".format(prop_name)) + + if prop_name not in self._properties_metadata: + self._properties_metadata[prop_name] = CaosDBPropertyMetaData() + + return self._properties_metadata[prop_name] + + def property_exists(self, prop_name: str): + """ + Check whether a property exists already. + """ + return prop_name not in self._forbidden and prop_name in self.__dict__ + + def set_property(self, + name: str, + value: Any, + overwrite: bool = False, + datatype: Optional[str] = None): + """ + Set a property for this entity with a name and a value. + + If this property is already set convert the value into a list and append the value. + This behavior can be overwritten using the overwrite flag, which will just overwrite + the existing value. + + name: str + Name of the property. + + value: Any + Value of the property. 
+ + overwrite: bool + Use this if you definitely only want one property with + that name (set to True). + """ + + if name in self._forbidden: + raise RuntimeError("Entity cannot be converted to a corresponding " + "Python representation. Name of property " + + name + " is forbidden!") + + already_exists = self.property_exists(name) + + if already_exists and not overwrite: + # each call to set_property checks first if it already exists + # if yes: Turn the attribute into a list and + # place all the elements into that list. + att = self.__getattribute__(name) + + if isinstance(att, list): + # just append, see below + pass + else: + old_att = self.__getattribute__(name) + self.__setattr__(name, [old_att]) + att = self.__getattribute__(name) + att.append(value) + else: + self.__setattr__(name, value) + + def __setattr__(self, name: str, val: Any): + """ + Allow setting generic properties. + """ + + # TODO: implement checking the value to correspond to one of the datatypes + # known for conversion. + + super().__setattr__(name, val) + + def _type_converted_list(self, + val: List, + pr: str, + references: Optional[db.Container], + visited: Dict[int, "CaosDBPythonEntity"]): + """ + Convert a list to a python list of the correct type. + + val: List + The value of a property containing the list. + + pr: str + The datatype according to the database entry. + """ + if not is_list_datatype(pr) and not isinstance(val, list): + raise RuntimeError("Not a list.") + + return [ + self._type_converted_value(i, get_list_datatype(pr), references, + visited) for i in val] + + def _type_converted_value(self, + val: Any, + pr: str, + references: Optional[db.Container], + visited: Dict[int, "CaosDBPythonEntity"]): + """ + Convert val to the correct type which is indicated by the database + type string in pr. + + References with ids will be turned into CaosDBPythonUnresolvedReference. 
+ """ + + if val is None: + return None + elif isinstance(val, db.Entity): + # this needs to be checked as second case as it is the ONLY + # case which does not depend on pr + # TODO: we might need to pass through the reference container + return convert_to_python_object(val, references, visited) + elif isinstance(val, list): + return self._type_converted_list(val, pr, references, visited) + elif pr is None: + return val + elif pr == DOUBLE: + return float(val) + elif pr == BOOLEAN: + return bool(val) + elif pr == INTEGER: + return int(val) + elif pr == TEXT: + return str(val) + elif pr == FILE: + return CaosDBPythonUnresolvedReference(val) + elif pr == REFERENCE: + return CaosDBPythonUnresolvedReference(val) + elif pr == DATETIME: + return self._parse_datetime(val) + elif is_list_datatype(pr): + return self._type_converted_list(val, pr, references, visited) + else: + # Generic references to entities: + return CaosDBPythonUnresolvedReference(val) + + def _parse_datetime(self, val: Union[str, datetime]): + """ + Convert val into a datetime object. + """ + if isinstance(val, datetime): + return val + return parser.parse(val) + + def get_property(self, name: str): + """ + Return the value of the property with name name. + + Raise an exception if the property does not exist. + """ + if not self.property_exists(name): + raise RuntimeError("Property {} does not exist.".format(name)) + att = self.__getattribute__(name) + return att + + def attribute_as_list(self, name: str): + """ + This is a workaround for the problem that lists containing only one + element are indistinguishable from simple types in this + representation. + + TODO: still relevant? seems to be only a problem if LIST types are not used. + """ + att = self.get_property(name) + + if isinstance(att, list): + return att + else: + return [att] + + def add_parent(self, parent: Union[ + CaosDBPythonUnresolvedParent, "CaosDBPythonRecordType", str]): + """ + Add a parent to this entity. 
Either using an unresolved parent or + using a real record type. + + Strings as argument for parent will automatically be converted to an + unresolved parent. Likewise, integers as argument will be automatically converted + to unresolved parents with just an id. + """ + + if isinstance(parent, str): + parent = CaosDBPythonUnresolvedParent(name=parent) + + if isinstance(parent, int): + parent = CaosDBPythonUnresolvedParent(id=parent) + + if self.has_parent(parent): + raise RuntimeError("Duplicate parent.") + self._parents.append(parent) + + def get_parents(self): + """ + Returns all parents of this entity. + + Use has_parent for checking for existence of parents + and add_parent for adding parents to this entity. + """ + return self._parents + + def has_parent(self, parent: Union[ + CaosDBPythonUnresolvedParent, "CaosDBPythonRecordType"]): + """ + Check whether this parent already exists for this entity. + + Strings as argument for parent will automatically be converted to an + unresolved parent. Likewise, integers as argument will be automatically converted + to unresolved parents with just an id. 
+ """ + + if isinstance(parent, str): + parent = CaosDBPythonUnresolvedParent(name=parent) + + if isinstance(parent, int): + parent = CaosDBPythonUnresolvedParent(id=parent) + + for p in self._parents: + if p.id is not None and p.id == parent.id: + return True + elif p.name is not None and p.name == parent.name: + return True + return False + + def _resolve_caosdb_python_unresolved_reference(self, propval, deep, + references, visited): + # This does not make sense for unset ids: + if propval.id is None: + raise RuntimeError("Unresolved property reference without an ID.") + # have we encountered this id before: + if propval.id in visited: + # self.__setattr__(prop, visited[propval.id]) + # don't do the lookup in the references container + return visited[propval.id] + + if references is None: + ent = db.Entity(id=propval.id).retrieve() + obj = convert_to_python_object(ent, references) + visited[propval.id] = obj + if deep: + obj.resolve_references(deep, references, visited) + return obj + + # lookup in container: + for ent in references: + # Entities in container without an ID will be skipped: + if ent.id is not None and ent.id == propval.id: + # resolve this entity: + obj = convert_to_python_object(ent, references) + visited[propval.id] = obj + # self.__setattr__(prop, visited[propval.id]) + if deep: + obj.resolve_references(deep, references, visited) + return obj + return propval + + def resolve_references(self, deep: bool, references: db.Container, + visited: Optional[Dict[Union[str, int], + "CaosDBPythonEntity"]] = None): + """ + Resolve this entity's references. This affects unresolved properties as well + as unresolved parents. + + deep: bool + If True recursively resolve references also for all resolved references. + + references: Optional[db.Container] + A container with references that might be resolved. + If None is passed as the container, this function tries to resolve entities from a running + CaosDB instance directly. 
+        """
+
+        # This parameter is used in the recursion to keep track of already visited
+        # entities (in order to detect cycles).
+        if visited is None:
+            visited = dict()
+
+        for parent in self.get_parents():
+            # TODO
+            if isinstance(parent, CaosDBPythonUnresolvedParent):
+                pass
+
+        for prop in self.get_properties():
+            propval = self.__getattribute__(prop)
+            # Resolve all previously unresolved attributes that are entities:
+            if deep and isinstance(propval, CaosDBPythonEntity):
+                propval.resolve_references(deep, references)
+            elif isinstance(propval, list):
+                resolvedelements = []
+                for element in propval:
+                    if deep and isinstance(element, CaosDBPythonEntity):
+                        element.resolve_references(deep, references)
+                        resolvedelements.append(element)
+                    if isinstance(element, CaosDBPythonUnresolvedReference):
+                        resolvedelements.append(
+                            self._resolve_caosdb_python_unresolved_reference(element, deep,
+                                                                             references, visited))
+                    else:
+                        resolvedelements.append(element)
+                self.__setattr__(prop, resolvedelements)
+
+            elif isinstance(propval, CaosDBPythonUnresolvedReference):
+                val = self._resolve_caosdb_python_unresolved_reference(propval, deep,
+                                                                       references, visited)
+                self.__setattr__(prop, val)
+
+    def get_properties(self):
+        """
+        Return the names of all properties.
+        """
+
+        return [p for p in self.__dict__
+                if p not in self._forbidden]
+
+    @staticmethod
+    def deserialize(serialization: dict):
+        """
+        Deserialize a yaml representation of an entity in high level API form. 
+ """ + + if "role" in serialization: + entity = high_level_type_for_role(serialization["role"])() + else: + entity = CaosDBPythonRecord() + + if "parents" in serialization: + for parent in serialization["parents"]: + if "unresolved" in parent: + id = None + name = None + if "id" in parent: + id = parent["id"] + if "name" in parent: + name = parent["name"] + entity.add_parent(CaosDBPythonUnresolvedParent( + id=id, name=name)) + else: + raise NotImplementedError( + "Currently, only unresolved parents can be deserialized.") + + for baseprop in ("name", "id", "description", "version"): + if baseprop in serialization: + entity.__setattr__(baseprop, serialization[baseprop]) + + if type(entity) == CaosDBPythonFile: + entity.file = serialization["file"] + entity.path = serialization["path"] + + for p in serialization["properties"]: + # The property needs to be set first: + + prop = serialization["properties"][p] + if isinstance(prop, dict): + if "unresolved" in prop: + entity.__setattr__(p, CaosDBPythonUnresolvedReference( + id=prop["id"])) + else: + entity.__setattr__(p, + entity.deserialize(prop)) + else: + entity.__setattr__(p, prop) + + # if there is no metadata in the yaml file just initialize an empty metadata object + if "metadata" in serialization and p in serialization["metadata"]: + metadata = serialization["metadata"][p] + propmeta = entity.get_property_metadata(p) + + for f in fields(propmeta): + if f.name in metadata: + propmeta.__setattr__(f.name, metadata[f.name]) + else: + pass + # raise NotImplementedError() + + return entity + + def serialize(self, without_metadata: bool = False, visited: dict = None): + """ + Serialize necessary information into a dict. + + without_metadata: bool + If True don't set the metadata field in order to increase + readability. Not recommended if deserialization is needed. 
+ """ + + if visited is None: + visited = dict() + + if self in visited: + return visited[self] + + metadata: Dict[str, Any] = dict() + properties = dict() + parents = list() + + # The full information to be returned: + fulldict = dict() + visited[self] = fulldict + + # Add CaosDB role: + fulldict["role"] = standard_type_for_high_level_type(self, True) + + for parent in self._parents: + if isinstance(parent, CaosDBPythonEntity): + parents.append(parent.serialize(without_metadata, visited)) + elif isinstance(parent, CaosDBPythonUnresolvedParent): + parents.append({"name": parent.name, "id": parent.id, + "unresolved": True}) + else: + raise RuntimeError("Incompatible class used as parent.") + + for baseprop in ("name", "id", "description", "version"): + val = self.__getattribute__(baseprop) + if val is not None: + fulldict[baseprop] = val + + if type(self) == CaosDBPythonFile: + fulldict["file"] = self.file + fulldict["path"] = self.path + + for p in self.get_properties(): + m = self.get_property_metadata(p) + metadata[p] = dict() + for f in fields(m): + val = m.__getattribute__(f.name) + if val is not None: + metadata[p][f.name] = val + + val = self.get_property(p) + if isinstance(val, CaosDBPythonUnresolvedReference): + properties[p] = {"id": val.id, "unresolved": True} + elif isinstance(val, CaosDBPythonEntity): + properties[p] = val.serialize(without_metadata, visited) + elif isinstance(val, list): + serializedelements = [] + for element in val: + if isinstance(element, CaosDBPythonUnresolvedReference): + elm = dict() + elm["id"] = element.id + elm["unresolved"] = True + serializedelements.append(elm) + elif isinstance(element, CaosDBPythonEntity): + serializedelements.append( + element.serialize(without_metadata, + visited)) + else: + serializedelements.append(element) + properties[p] = serializedelements + else: + properties[p] = val + + fulldict["properties"] = properties + fulldict["parents"] = parents + + if not without_metadata: + fulldict["metadata"] = 
metadata + return fulldict + + def __str__(self): + return yaml.dump(self.serialize(False)) + + # This seemed like a good solution, but makes it difficult to + # compare python objects directly: + # + # def __repr__(self): + # return yaml.dump(self.serialize(True)) + + +class CaosDBPythonRecord(CaosDBPythonEntity): + pass + + +class CaosDBPythonRecordType(CaosDBPythonEntity): + pass + + +class CaosDBPythonProperty(CaosDBPythonEntity): + pass + + +class CaosDBMultiProperty: + """ + This implements a multi property using a python list. + """ + + def __init__(self): + raise NotImplementedError() + + +class CaosDBPythonFile(CaosDBPythonEntity): + def download(self, target=None): + if self.id is None: + raise RuntimeError("Cannot download file when id is missing.") + f = db.File(id=self.id).retrieve() + return f.download(target) + + +BASE_ATTRIBUTES = ( + "id", "name", "description", "version", "path", "file") + + +def _single_convert_to_python_object(robj: CaosDBPythonEntity, + entity: db.Entity, + references: Optional[db.Container] = None, + visited: Optional[Dict[int, + "CaosDBPythonEntity"]] = None): + """ + Convert a db.Entity from the standard API to a (previously created) + CaosDBPythonEntity from the high level API. + + This method will not resolve any unresolved references, so reference properties + as well as parents will become unresolved references in the first place. + + The optional third parameter can be used + to resolve references that occur in the converted entities and resolve them + to their correct representations. (Entities that are not found remain as + CaosDBPythonUnresolvedReferences.) + + Returns the input object robj. + """ + + # This parameter is used in the recursion to keep track of already visited + # entites (in order to detect cycles). 
+ if visited is None: + visited = dict() + + if id(entity) in visited: + return visited[id(entity)] + else: + visited[id(entity)] = robj + + for base_attribute in BASE_ATTRIBUTES: + val = entity.__getattribute__(base_attribute) + if val is not None: + if isinstance(val, db.common.models.Version): + val = val.id + robj.__setattr__(base_attribute, val) + + for prop in entity.properties: + robj._set_property_from_entity(prop, entity.get_importance(prop), references, + visited) + + for parent in entity.parents: + robj.add_parent(CaosDBPythonUnresolvedParent(id=parent.id, + name=parent.name)) + + return robj + + +def _convert_property_value(propval): + if isinstance(propval, CaosDBPythonUnresolvedReference): + propval = propval.id + elif isinstance(propval, CaosDBPythonEntity): + propval = _single_convert_to_entity( + standard_type_for_high_level_type(propval)(), propval) + elif isinstance(propval, list): + propval = [_convert_property_value(element) for element in propval] + + # TODO: test case for list missing + + return propval + + +def _single_convert_to_entity(entity: db.Entity, + robj: CaosDBPythonEntity): + """ + Convert a CaosDBPythonEntity to an entity in standard pylib format. + + entity: db.Entity + An empty entity. + + robj: CaosDBPythonEntity + The CaosDBPythonEntity that is supposed to be converted to the entity. 
+ """ + + for base_attribute in BASE_ATTRIBUTES: + if base_attribute in ("file", "path") and not isinstance(robj, CaosDBPythonFile): + continue + + # Skip version: + if base_attribute == "version": + continue + + val = robj.__getattribute__(base_attribute) + + if val is not None: + entity.__setattr__(base_attribute, val) + + for parent in robj.get_parents(): + if isinstance(parent, CaosDBPythonUnresolvedParent): + entity.add_parent(name=parent.name, id=parent.id) + elif isinstance(parent, CaosDBPythonRecordType): + raise NotImplementedError() + else: + raise RuntimeError("Incompatible class used as parent.") + + for prop in robj.get_properties(): + propval = robj.__getattribute__(prop) + metadata = robj.get_property_metadata(prop) + + propval = _convert_property_value(propval) + + entity.add_property( + name=prop, + value=propval, + unit=metadata.unit, + importance=metadata.importance, + datatype=metadata.datatype, + description=metadata.description, + id=metadata.id) + + return entity + + +def convert_to_entity(python_object): + if isinstance(python_object, db.Container): + # Create a list of objects: + + return [convert_to_entity(i) for i in python_object] + elif isinstance(python_object, CaosDBPythonRecord): + return _single_convert_to_entity(db.Record(), python_object) + elif isinstance(python_object, CaosDBPythonFile): + return _single_convert_to_entity(db.File(), python_object) + elif isinstance(python_object, CaosDBPythonRecordType): + return _single_convert_to_entity(db.RecordType(), python_object) + elif isinstance(python_object, CaosDBPythonProperty): + return _single_convert_to_entity(db.Property(), python_object) + elif isinstance(python_object, CaosDBPythonEntity): + return _single_convert_to_entity(db.Entity(), python_object) + else: + raise ValueError("Cannot convert an object of this type.") + + +def convert_to_python_object(entity: Union[db.Container, db.Entity], + references: Optional[db.Container] = None, + visited: Optional[Dict[int, + 
"CaosDBPythonEntity"]] = None): + """ + Convert either a container of CaosDB entities or a single CaosDB entity + into the high level representation. + + The optional second parameter can be used + to resolve references that occur in the converted entities and resolve them + to their correct representations. (Entities that are not found remain as + CaosDBPythonUnresolvedReferences.) + """ + if isinstance(entity, db.Container): + # Create a list of objects: + return [convert_to_python_object(i, references, visited) for i in entity] + + # TODO: recursion problems? + return _single_convert_to_python_object( + high_level_type_for_standard_type(entity)(), + entity, + references, + visited) + + +def new_high_level_entity(entity: db.RecordType, + importance_level: str, + name: Optional[str] = None): + """ + Create an new record in high level format based on a record type in standard format. + + entity: db.RecordType + The record type to initialize the new record from. + + importance_level: str + None, obligatory, recommended or suggested + Initialize new properties up to this level. + Properties in the record type with no importance will be added + regardless of the importance_level. + + name: str + Name of the new record. + """ + + r = db.Record(name=name) + r.add_parent(entity) + + impmap = { + None: 0, "SUGGESTED": 3, "RECOMMENDED": 2, "OBLIGATORY": 1} + + for prop in entity.properties: + imp = entity.get_importance(prop) + if imp is not None and impmap[importance_level] < impmap[imp]: + continue + + r.add_property(prop) + + return convert_to_python_object(r) + + +def create_record(rtname: str, name: Optional[str] = None, **kwargs): + """ + Create a new record based on the name of a record type. The new record is returned. + + rtname: str + The name of the record type. + + name: str + This is optional. A name for the new record. + + kwargs: + Additional arguments are used to set attributes of the + new record. 
+ """ + obj = new_high_level_entity( + db.RecordType(name=rtname).retrieve(), "SUGGESTED", name) + for key, value in kwargs.items(): + obj.__setattr__(key, value) + return obj + + +def load_external_record(record_name: str): + """ + Retrieve a record by name and convert it to the high level API format. + """ + return convert_to_python_object(db.Record(name=record_name).retrieve()) + + +def create_entity_container(record: CaosDBPythonEntity): + """ + Convert this record into an entity container in standard format that can be used + to insert or update entities in a running CaosDB instance. + """ + ent = convert_to_entity(record) + lse: List[db.Entity] = [ent] + create_flat_list([ent], lse) + return db.Container().extend(lse) + + +def query(query: str, + resolve_references: Optional[bool] = True, + references: Optional[db.Container] = None): + """ + + """ + res = db.execute_query(query) + objects = convert_to_python_object(res) + if resolve_references: + for obj in objects: + obj.resolve_references(True, references) + return objects diff --git a/src/caosdb/schema-pycaosdb-ini.yml b/src/linkahead/schema-pycaosdb-ini.yml similarity index 89% rename from src/caosdb/schema-pycaosdb-ini.yml rename to src/linkahead/schema-pycaosdb-ini.yml index cb07dfeb84bc16e212100232403b0f66543c73e9..89ce98570738fdd29dba81de25a2c022c1581467 100644 --- a/src/caosdb/schema-pycaosdb-ini.yml +++ b/src/linkahead/schema-pycaosdb-ini.yml @@ -10,11 +10,11 @@ schema-pycaosdb-ini: type: integer enum: [0, 1, 2] Connection: - description: Settings for the connection to the CaosDB server + description: Settings for the connection to the LinkAhead server additionalProperties: false properties: url: - description: "URL of the CaosDB server. Allowed are HTTP and HTTPS connections. However, since authentication tokens and sometimes even passwords are send in plain text to the server it is **highly** recommended to use HTTPS connections whenever possible. HTTP is ok for testing and debugging." 
+ description: "URL of the LinkAhead server. Allowed are HTTP and HTTPS connections. However, since authentication tokens and sometimes even passwords are send in plain text to the server it is **highly** recommended to use HTTPS connections whenever possible. HTTP is ok for testing and debugging." type: string pattern: http(s)?://[-a-zA-Z0-9\.]+(:[0-9]+)?(/)? examples: ["https://demo.indiscale.com/", "http://localhost:10080/"] @@ -98,16 +98,16 @@ schema-pycaosdb-ini: then: required: [url, username] IntegrationTests: - description: "Used by the integration test suite from the caosdb-pyinttest repo." + description: "Used by the integration test suite from the linkahead-pyinttest repo." additionalProperties: true Misc: description: "Some additional configuration settings." additionalProperties: true advancedtools: - description: "Configuration settings for the caosadvancedtools." + description: "Configuration settings for the linkahead-advancedtools." additionalProperties: true caoscrawler: - description: "Configuration settings for the CaosDB Crawler." + description: "Configuration settings for the LinkAhead Crawler." additionalProperties: true sss_helper: description: "Configuration settings for server-side scripting." diff --git a/src/linkahead/utils/__init__.py b/src/linkahead/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/linkahead/utils/caosdb_admin.py b/src/linkahead/utils/caosdb_admin.py new file mode 100644 index 0000000000000000000000000000000000000000..4128a5d0b4a45b96adcabd04820aa8b4c1c2c3c8 --- /dev/null +++ b/src/linkahead/utils/caosdb_admin.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. 
+# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +import warnings +import sys + +from linkahead.utils.linkahead_admin import main + +if __name__ == "__main__": + warnings.warn("caosdb_admin.py is deprecated. Please use linkahead_admin.py", + DeprecationWarning) + sys.exit(main()) diff --git a/src/linkahead/utils/checkFileSystemConsistency.py b/src/linkahead/utils/checkFileSystemConsistency.py new file mode 100755 index 0000000000000000000000000000000000000000..29d03dd5c63da3713ab134573f80a0b96919ce9f --- /dev/null +++ b/src/linkahead/utils/checkFileSystemConsistency.py @@ -0,0 +1,127 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ** end header
+#
+
+"""requests the server to execute checkFileSystemConsistency job."""
+
+import sys
+import linkahead as db
+
+from argparse import ArgumentParser
+from argparse import RawDescriptionHelpFormatter
+
+__version__ = 0.1
+__date__ = '2016-08-31'
+__updated__ = '2016-09-01'
+
+
+class CLIError(Exception):
+    """Generic exception to raise and log different fatal errors."""
+
+    def __init__(self, msg):
+        super(CLIError).__init__(type(self))
+        self.msg = "E: %s" % msg
+
+    def __str__(self):
+        return self.msg
+
+    def __unicode__(self):
+        return self.msg
+
+
+def runCheck(timeout, location):
+    """ Request the LinkAhead server to check the file system for consistency.
+
+    location == None means that the whole file system is being checked.
+    Otherwise only the directory tree under location is being checked.
+    """
+
+    if (timeout is not None):
+        db.get_config().set("Connection", "timeout", str(100 + int(timeout)))
+    files = db.Container().retrieve(
+        unique=False, raise_exception_on_error=False, flags={
+            "fileStorageConsistency": (
+                "-t " + str(timeout) if timeout else "") + (
+                location if location else ""), })
+    return files
+
+
+def main(argv=None):
+    """Command line options."""
+
+    if argv is None:
+        argv = sys.argv
+    else:
+        sys.argv.extend(argv)
+
+    # program_name = os.path.basename(sys.argv[0])
+    program_version = "v%s" % __version__
+    program_build_date = str(__updated__)
+    program_version_message = '%%(prog)s %s (%s)' % (
+        program_version, program_build_date)
+    program_license = '''
+
+  Copyright 2016 BMPG. All rights reserved.
+ + Distributed on an "AS IS" basis without warranties + or conditions of any kind, either express or implied. + +USAGE +''' + + # Setup argument parser + parser = ArgumentParser(description=program_license, + formatter_class=RawDescriptionHelpFormatter) + parser.add_argument( + "-v", + "--verbose", + dest="verbose", + action="count", + help="set verbosity level [default: %(default)s]", + default=0) + parser.add_argument('-V', '--version', action='version', + version=program_version_message) + parser.add_argument( + '-t', + '--timeout', + dest="timeout", + help="timeout in seconds for the database requests. [default: %(default)s]", + metavar="TIMEOUT", + default="200") + parser.add_argument('location') + + # Process arguments + args = parser.parse_args() + global VERBOSITY + + VERBOSITY = args.verbose + TIMEOUT = args.timeout + + print(runCheck(TIMEOUT, args.location).messages) + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/src/linkahead/utils/create_revision.py b/src/linkahead/utils/create_revision.py new file mode 100644 index 0000000000000000000000000000000000000000..5f6ecc8148859d0ee0908412ff80d20d465cdb25 --- /dev/null +++ b/src/linkahead/utils/create_revision.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. 
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+# ** end header
+#
+
+"""provides utilities for creating revisions of database entries."""
+import linkahead as db
+from linkahead import INTEGER, LIST
+
+
+def bend_references(from_id, to_id, except_for=None):
+    """redirects all references to a new target.
+
+    All entities having a reference pointing to from_id are found
+    and those references are changed to point to to_id.
+    entities having an id listed in except_for are excluded.
+
+Parameters
+----------
+
+from_id : int
+    the old object to which references were pointing
+to_id : int
+    the new object to which references will be pointing
+except_for : list of int
+    entities with id of this list will not be changed
+    """
+    if except_for is None:
+        except_for = [to_id]
+    entities = db.execute_query(
+        "FIND ENTITY WHICH references {}".format(from_id))
+    for ent in entities:
+        if ent.id in except_for:
+            continue
+        for prop in ent.properties:
+            if isinstance(
+                    prop.value, int) and (
+                    prop.datatype != INTEGER) and (
+                    prop.value == from_id):
+                prop.value = to_id
+            if (isinstance(prop.value, list)
+                    and len(prop.value) > 0
+                    and isinstance(prop.value[0], int)
+                    and (prop.datatype != LIST(INTEGER))
+                    and from_id in prop.value):
+                index = prop.value.index(from_id)
+                prop.value[index] = to_id
+        ent.update()
+
+
+def create_revision(old_id, prop, value):
+    """creates a revision of an existing record.
+
+    This function changes the record with id old_id. The value of the
+    property prop is changed to value.
+ +Parameters +---------- + +old_id : int + id of the record to be changed +prop : string + name of the property to be changed +value : type of corresponding property + the new value of the corresponding property +""" + record = db.execute_query("FIND {}".format(old_id))[0] + new_rec = record.copy() + new_rec.get_property(prop).value = value + try: + new_rec.remove_property("revisionOf") + except BaseException: + pass + new_rec.add_property(name="revisionOf", value=record.id) + new_rec.insert() + bend_references(record.id, new_rec.id) diff --git a/src/linkahead/utils/get_entity.py b/src/linkahead/utils/get_entity.py new file mode 100644 index 0000000000000000000000000000000000000000..ea9f3228bfc32f223979846623fccdec45752e5d --- /dev/null +++ b/src/linkahead/utils/get_entity.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2023 Henrik tom Wörden <h.tomwoerden@indiscale.com> +# Copyright (C) 2023 IndiScale GmbH <info@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# + +"""Convenience functions to retrieve a specific entity.""" + +from typing import Union +from ..common.models import execute_query, Entity + + +def get_entity_by_name(name: str) -> Entity: + """Return the result of a unique query that uses the name to find the correct entity. 
+ + Submits the query "FIND ENTITY WITH name='{name}'". + """ + return execute_query(f"FIND ENTITY WITH name='{name}'", unique=True) + + +def get_entity_by_id(eid: Union[str, int]) -> Entity: + """Return the result of a unique query that uses the id to find the correct entity. + + Submits the query "FIND ENTITY WITH id='{eid}'". + """ + return execute_query(f"FIND ENTITY WITH id='{eid}'", unique=True) + + +def get_entity_by_path(path: str) -> Entity: + """Return the result of a unique query that uses the path to find the correct file. + + Submits the query "FIND FILE WHICH IS STORED AT '{path}'". + """ + return execute_query(f"FIND FILE WHICH IS STORED AT '{path}'", unique=True) diff --git a/src/linkahead/utils/git_utils.py b/src/linkahead/utils/git_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..7a58272a3bef1930f75a1e08364349388e2bb89f --- /dev/null +++ b/src/linkahead/utils/git_utils.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +# +# This file is a part of the CaosDB Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> +# Copyright (C) 2020-2022 IndiScale GmbH <info@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. 
+# +# ** end header +# +"""git-utils: Some functions for retrieving information about git repositories. + +""" + +import logging +import tempfile + +from subprocess import call + +logger = logging.getLogger(__name__) + + +def get_origin_url_in(folder: str): + """return the Fetch URL of the git repository in the given folder.""" + with tempfile.NamedTemporaryFile(delete=False, mode="w") as t: + call(["git", "remote", "show", "origin"], stdout=t, cwd=folder) + with open(t.name, "r") as t: + urlString = "Fetch URL:" + + for line in t.readlines(): + if urlString in line: + return line[line.find(urlString) + len(urlString):].strip() + + return None + + +def get_diff_in(folder: str, save_dir=None): + """returns the name of a file where the out put of "git diff" in the given + folder is stored.""" + with tempfile.NamedTemporaryFile(delete=False, mode="w", dir=save_dir) as t: + call(["git", "diff"], stdout=t, cwd=folder) + + return t.name + + +def get_branch_in(folder: str): + """returns the current branch of the git repository in the given folder. + + The command "git branch" is called in the given folder and the + output is returned + """ + with tempfile.NamedTemporaryFile(delete=False, mode="w") as t: + call(["git", "rev-parse", "--abbrev-ref", "HEAD"], stdout=t, cwd=folder) + with open(t.name, "r") as t: + return t.readline().strip() + + +def get_commit_in(folder: str): + """returns the commit hash in of the git repository in the given folder. 
+ + The command "git log -1 --format=%h" is called in the given folder + and the output is returned + """ + + with tempfile.NamedTemporaryFile(delete=False, mode="w") as t: + call(["git", "log", "-1", "--format=%h"], stdout=t, cwd=folder) + with open(t.name, "r") as t: + return t.readline().strip() diff --git a/src/linkahead/utils/linkahead_admin.py b/src/linkahead/utils/linkahead_admin.py new file mode 100755 index 0000000000000000000000000000000000000000..f7e3b8b63f18e37e6210f2aa03f34ce5b0f688d4 --- /dev/null +++ b/src/linkahead/utils/linkahead_admin.py @@ -0,0 +1,657 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. 
+# +# ** end header +# + +"""A small LinkAhead client with a focus on administration of the server.""" + +from __future__ import print_function, unicode_literals + +import getpass +import sys +from argparse import ArgumentParser, RawDescriptionHelpFormatter + +import linkahead as db +from linkahead import administration as admin +from linkahead.exceptions import HTTPClientError + +__all__ = [] +__version__ = 0.3 +__date__ = '2016-09-19' +__updated__ = '2018-12-11' + + +def do_update_role(args): + admin._update_role(name=args.role_name, description=args.role_description) + + +def do_create_role(args): + admin._insert_role(name=args.role_name, description=args.role_description) + + +def do_retrieve_role(args): + print(admin._retrieve_role(name=args.role_name)) + + +def do_delete_role(args): + admin._delete_role(name=args.role_name) + + +def do_retrieve(args): + c = None + + if args.query: + if len(args.entities) > 1: + raise Exception("Only one query at a time can be retrieved.") + c = db.execute_query(args.entities[0], flags=eval(args.flags)) + else: + c = db.Container() + + for i in args.entities: + try: + eid = int(i) + c.append(db.Entity(id=eid)) + except ValueError: + c.append(db.Entity(name=i)) + c.retrieve(flags=eval(args.flags)) + print(c) + + +def do_update(args): + fdict = eval(args.flags) + xml = open(args.xml_path, "r") + ret = db.get_connection().update( + entity_uri_segment=["Entity"], reconnect=True, body=xml) + db.Container._response_to_entities(ret) + + +def do_delete(args): + c = db.Container() + + for i in args.entities: + c.append(db.Entity(id=i)) + + c.delete() + + +def do_insert(args): + fdict = eval(args.flags) + xml = open(args.xml_path, "r") + ret = db.get_connection().insert( + entity_uri_segment=["Entity"], + reconnect=True, + query_dict=fdict, + body=xml) + print(db.Container._response_to_entities(ret)) + + +def _promt_for_pw(): + password = getpass.getpass(prompt="Please type password: ") + password2 = getpass.getpass(prompt="Please type 
password again: ") + + if password != password2: + raise Exception("Password strings didn't match") + + return password + + +def do_create_user(args): + password = args.user_password + + if args.ask_password is True: + password = _promt_for_pw() + try: + admin._insert_user(name=args.user_name, + email=args.user_email, password=password) + + if args.activate_user: + do_activate_user(args) + except HTTPClientError as e: + print(e.msg) + + +def do_activate_user(args): + try: + admin._update_user(name=args.user_name, status="ACTIVE") + except HTTPClientError as e: + print(e.msg) + + +def do_deactivate_user(args): + try: + admin._update_user(name=args.user_name, status="INACTIVE") + except HTTPClientError as e: + print(e.msg) + + +def do_set_user_password(args): + if args.user_password is None: + password = _promt_for_pw() + else: + password = args.user_password + try: + admin._update_user(name=args.user_name, password=password) + except HTTPClientError as e: + print(e.msg) + + +def do_add_user_roles(args): + roles = admin._get_roles(username=args.user_name, realm=None) + + for r in args.user_roles: + roles.add(r) + admin._set_roles(username=args.user_name, roles=roles) + + +def do_remove_user_roles(args): + roles = admin._get_roles(username=args.user_name, realm=None) + + for r in args.user_roles: + if r in roles: + roles.remove(r) + admin._set_roles(username=args.user_name, roles=roles) + + +def do_set_user_entity(args): + admin._update_user(name=args.user_name, entity=args.user_entity) + + +def do_reset_user_entity(args): + admin._update_user(name=args.user_name, entity="") + + +def do_set_user_email(args): + admin._update_user(name=args.user_name, email=args.user_email) + + +def do_retrieve_user(args): + print(admin._retrieve_user(name=args.user_name)) + + +def do_delete_user(args): + admin._delete_user(name=args.user_name) + + +def do_retrieve_user_roles(args): + print(admin._get_roles(username=args.user_name)) + + +def do_retrieve_role_permissions(args): + 
print(admin._get_permissions(role=args.role_name)) + + +def do_grant_role_permissions(args): + perms = admin._get_permissions(args.role_name) + + for p in args.role_permissions: + g = admin.PermissionRule( + action="Grant", permission=p, priority=args.permissions_priority) + d = admin.PermissionRule( + action="Deny", permission=p, priority=args.permissions_priority) + + if g in perms: + perms.remove(g) + + if d in perms: + perms.remove(d) + perms.add(g) + admin._set_permissions(role=args.role_name, permission_rules=perms) + + +def do_revoke_role_permissions(args): + perms = admin._get_permissions(args.role_name) + + for p in args.role_permissions: + g = admin.PermissionRule( + action="Grant", permission=p, priority=args.permissions_priority) + d = admin.PermissionRule( + action="Deny", permission=p, priority=args.permissions_priority) + + if g in perms: + perms.remove(g) + + if d in perms: + perms.remove(d) + admin._set_permissions(role=args.role_name, permission_rules=perms) + + +def do_deny_role_permissions(args): + perms = admin._get_permissions(args.role_name) + + for p in args.role_permissions: + g = admin.PermissionRule( + action="Grant", permission=p, priority=args.permissions_priority) + d = admin.PermissionRule( + action="Deny", permission=p, priority=args.permissions_priority) + + if g in perms: + perms.remove(g) + + if d in perms: + perms.remove(d) + perms.add(d) + admin._set_permissions(role=args.role_name, permission_rules=perms) + + +def do_retrieve_entity_acl(args): + entities = db.execute_query(q=args.query, flags={"ACL": None}) + + for entity in entities: + print(entity.id) + print(entity.acl) + + +def do_action_entity_permissions(args): + entities = db.execute_query(q=args.query, flags={"ACL": None}) + + for entity in entities: + for p in args.permissions: + getattr(entity, args.action)(role=args.role, priority=args.priority, + permission=p) + entities.update(flags={"ACL": None}) + + for entity in entities: + print(entity.id) + print(entity.acl) + 

def main(argv=None):
    """Parse command line arguments and dispatch to the chosen command.

    Parameters
    ----------
    argv : list of str, optional
        Extra arguments; appended to ``sys.argv`` (legacy behavior kept
        for backwards compatibility).

    Returns
    -------
    The return value of the selected ``do_*`` command, or 2 if no
    command was given.
    """

    if argv is None:
        argv = sys.argv
    else:
        # NOTE(review): extending (rather than replacing) sys.argv looks like
        # template legacy; kept to preserve behavior for existing callers.
        sys.argv.extend(argv)

    # program_name = os.path.basename(sys.argv[0])
    program_version = "v%s" % __version__
    program_build_date = str(__updated__)
    program_version_message = '%%(prog)s %s (%s)' % (
        program_version, program_build_date)
    program_shortdesc = __import__('__main__').__doc__
    program_license = '''%s

USAGE
''' % (program_shortdesc)

    # Setup argument parser
    parser = ArgumentParser(description=program_license,
                            formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument('-V', '--version', action='version',
                        version=program_version_message)
    parser.add_argument("--auth-token", metavar="AUTH_TOKEN",
                        dest="auth_token",
                        help=("A LinkAhead authentication token (default: None). "
                              "If the authentication token is passed, the "
                              "`password_method` of the connection is set to "
                              "`auth_token` and the respective configuration "
                              "from the pylinkahead.ini is effectively being "
                              "overridden.\nTODO: Also allow passing the token "
                              "via environment variables."))
    subparsers = parser.add_subparsers(
        title="commands",
        metavar="COMMAND",
        description="You can invoke the following commands. Print the detailed help for each command with #> linkahead_admin COMMAND -h")

    # users (CRUD)
    subparser = subparsers.add_parser(
        "create_user",
        help="Create a new user in LinkAhead's internal user database. You need"
        " to activate the user before use.")
    subparser.set_defaults(call=do_create_user)
    mg = subparser.add_mutually_exclusive_group()
    mg.add_argument("-a", "--ask-password",
                    help="Prompt for a password.", action="store_true")
    mg.add_argument(
        "--password",
        dest="user_password",
        default=None,
        help="Alternative way to provide the new user's password. Please "
        "consider to use the more secure, interactive way (-a option).")
    subparser.add_argument("-c", "--activate-user",
                           help="Activate the user after creation.",
                           action="store_true")
    subparser.add_argument(
        metavar='USERNAME',
        dest="user_name",
        help="A user name which is unique in the internal user database.")
    subparser.add_argument(
        metavar="EMAIL",
        nargs='?',
        dest="user_email",
        help="The email address of the new user.")

    subparser = subparsers.add_parser(
        "activate_user", help="(Re-)activate an inactive (but existing) user.")
    subparser.set_defaults(call=do_activate_user)
    subparser.add_argument(metavar='USERNAME', dest="user_name",
                           help="The name of the user who is to be activated.")

    subparser = subparsers.add_parser(
        "deactivate_user", help="Deactivate an active user.")
    subparser.set_defaults(call=do_deactivate_user)
    subparser.add_argument(
        metavar='USERNAME',
        dest="user_name",
        help="The name of the user who is to be deactivated.")

    subparser = subparsers.add_parser(
        "set_user_password",
        help="Set a new password for a user. "
        "By default, you will be prompted for the password.")
    subparser.set_defaults(call=do_set_user_password)
    subparser.add_argument(
        metavar='USERNAME',
        dest="user_name",
        help="The name of the user who's password is to be set.")
    subparser.add_argument(
        metavar='PASSWORD',
        nargs="?",
        dest="user_password",
        default=None,
        help="Alternative way to provide the user's new password. "
        "The more secure (and default way) is to provide it interactively.")

    subparser = subparsers.add_parser(
        "set_user_entity",
        help="Associate a user with an existing entity (which should represent a person, a program, an organization or something similar).")
    subparser.set_defaults(call=do_set_user_entity)
    subparser.add_argument(
        metavar='USERNAME',
        dest="user_name",
        help="The name of the user who's associated entity you want to set.")
    subparser.add_argument(metavar='ENTITY', dest="user_entity",
                           help="An ID of an existing entity.")

    subparser = subparsers.add_parser(
        "reset_user_entity",
        help="Terminate the association of a user with an entity.")
    subparser.set_defaults(call=do_reset_user_entity)
    subparser.add_argument(
        metavar='USERNAME',
        dest="user_name",
        help="The name of the user who's associated entity you want to reset.")

    subparser = subparsers.add_parser(
        "set_user_email", help="Set a new email for a user.")
    subparser.set_defaults(call=do_set_user_email)
    subparser.add_argument(
        metavar='USERNAME',
        dest="user_name",
        help="The name of the user who's email is to be set.")
    subparser.add_argument(
        metavar='EMAIL',
        dest="user_email",
        help="The name of the user who's email is to be set.")

    subparser = subparsers.add_parser(
        "retrieve_user", help="Retrieve a user (email, entity)")
    subparser.set_defaults(call=do_retrieve_user)
    subparser.add_argument(
        metavar='USERNAME', dest="user_name", help="The name of the user.")

    subparser = subparsers.add_parser(
        "delete_user",
        help="Delete a user from linkahead's internal user database.")
    subparser.set_defaults(call=do_delete_user)
    subparser.add_argument(metavar='USERNAME', dest="user_name",
                           help="The name of the user who is to be deleted.")

    # user roles
    subparser = subparsers.add_parser(
        "add_user_roles", help="Extend the roles of a user.")
    subparser.set_defaults(call=do_add_user_roles)
    subparser.add_argument(
        metavar='USERNAME',
        dest="user_name",
        help="The name of the user who's roles are to be extended.")
    subparser.add_argument(
        metavar='ROLES',
        dest="user_roles",
        nargs='+',
        help="A space separated list of (existing) roles.")

    subparser = subparsers.add_parser(
        "remove_user_roles", help="Remove some of the roles of a user.")
    subparser.set_defaults(call=do_remove_user_roles)
    subparser.add_argument(
        metavar='USERNAME',
        dest="user_name",
        help="The name of the user from whom you want to take some roles away.")
    subparser.add_argument(
        metavar='ROLES',
        dest="user_roles",
        nargs='+',
        help="A space separated list of (existing) roles.")

    subparser = subparsers.add_parser(
        "retrieve_user_roles", help="Retrieve a user's roles.")
    subparser.set_defaults(call=do_retrieve_user_roles)
    subparser.add_argument(
        metavar='USERNAME', dest="user_name", help="The name of the user.")

    # role permissions
    subparser = subparsers.add_parser(
        "retrieve_role_permissions",
        help="Retrieve the set of permission rules of a role.")
    subparser.set_defaults(call=do_retrieve_role_permissions)
    subparser.add_argument(
        metavar='ROLE',
        dest="role_name",
        help="The name of the role which permissions are to be retrieved.")

    subparser = subparsers.add_parser(
        "grant_role_permissions", help="Grant permissions to a role.")
    subparser.set_defaults(call=do_grant_role_permissions)
    subparser.add_argument(
        '--priority',
        dest="permissions_priority",
        action="store_true",
        default=False,
        help="This flag enables priority permission rules.")
    subparser.add_argument(
        metavar='ROLE',
        dest="role_name",
        help="The name of the role to which the permissions are to be granted.")
    subparser.add_argument(
        metavar='PERMISSIONS',
        dest="role_permissions",
        nargs='+',
        help="A space separated list of permissions.")

    subparser = subparsers.add_parser(
        "revoke_role_permissions",
        help="Remove previously granted or denied permissions from a role.")
    subparser.set_defaults(call=do_revoke_role_permissions)
    subparser.add_argument(
        '--priority',
        dest="permissions_priority",
        action="store_true",
        default=False,
        help="This flag is needed to revoke priority permissions.")
    subparser.add_argument(
        metavar='ROLE',
        dest="role_name",
        help="The name of the role from which you want to revoke permissions.")
    subparser.add_argument(
        metavar='PERMISSIONS',
        dest="role_permissions",
        nargs='+',
        help="A space separated list of permissions.")

    subparser = subparsers.add_parser(
        "deny_role_permissions", help="Deny a role permissions.")
    subparser.set_defaults(call=do_deny_role_permissions)
    subparser.add_argument(
        '--priority',
        dest="permissions_priority",
        action="store_true",
        default=False,
        help="This flag enables priority permission rules.")
    subparser.add_argument(
        metavar='ROLE',
        dest="role_name",
        help="The name of the role which you want to deny permissions.")
    subparser.add_argument(
        metavar='PERMISSIONS',
        dest="role_permissions",
        nargs='+',
        help="A space separated list of permissions.")

    # entities (CRUD)
    subparser = subparsers.add_parser("insert", help="Insert entities.")
    subparser.set_defaults(call=do_insert)
    subparser.add_argument(
        '-f',
        '--flags',
        dest="flags",
        help="A python dictionary (dict) with flag keys and their values.",
        metavar="FLAGS",
        default="{}")
    subparser.add_argument(metavar='PATH', dest="xml_path",
                           help="Path to an xml file.")

    subparser = subparsers.add_parser("retrieve", help="Retrieve entities.")
    subparser.set_defaults(call=do_retrieve)
    subparser.add_argument(
        '-f',
        '--flags',
        dest="flags",
        help="A python dictionary (dict) with flag keys and their values.",
        metavar="FLAGS",
        default="{}")
    subparser.add_argument('-q', '--query', dest='query', action="store_true",
                           help="If the ENTITIES argument is a query.")
    subparser.add_argument(metavar='ENTITIES', dest="entities", nargs='+',
                           help="A space separated list of ids or names of"
                           "entities or a single query.")

    subparser = subparsers.add_parser("update", help="Update entities.")
    subparser.set_defaults(call=do_update)
    subparser.add_argument(
        '-f',
        '--flags',
        dest="flags",
        help="A python dictionary (dict) with flag keys and their values.",
        metavar="FLAGS",
        default="{}")
    subparser.add_argument(metavar='PATH', dest="xml_path",
                           help="Path to an xml file.")

    subparser = subparsers.add_parser("delete", help="Delete entities.")
    subparser.set_defaults(call=do_delete)
    subparser.add_argument(
        '-f',
        '--flags',
        dest="flags",
        help="A python dictionary (dict) with flag keys and their values.",
        metavar="FLAGS",
        default="{}")
    subparser.add_argument(
        metavar='ENTITIES',
        dest="entities",
        nargs='+',
        help="A space separated list of ids or names of entities.")

    # roles (CRUD)
    create_role_parser = subparsers.add_parser(
        "create_role", help="Create a new role.")
    create_role_parser.set_defaults(call=do_create_role)
    create_role_parser.add_argument(
        dest="role_name", metavar="ROLENAME", help="The name of the new role.")
    create_role_parser.add_argument(
        dest="role_description",
        metavar="DESCRIPTION",
        help="A description of the role's purpose, it's intended use case, characteristics of the users who have this role, etc.")

    retrieve_role_parser = subparsers.add_parser(
        "retrieve_role", help="Retrieve the description of an existing role.")
    retrieve_role_parser.set_defaults(call=do_retrieve_role)
    retrieve_role_parser.add_argument(
        dest="role_name",
        metavar="ROLENAME",
        help="The name of the existing role.")

    update_role_parser = subparsers.add_parser(
        "update_role", help="Change the description of an existing role.")
    update_role_parser.set_defaults(call=do_update_role)
    update_role_parser.add_argument(
        dest="role_name",
        metavar="ROLENAME",
        help="The name of the existing role.")
    update_role_parser.add_argument(
        dest="role_description",
        metavar="DESCRIPTION",
        help="A new description of the role's purpose, it's intended use case, characteristics of the users who have this role, etc.")

    delete_role_parser = subparsers.add_parser(
        "delete_role", help="Delete a role.")
    delete_role_parser.set_defaults(call=do_delete_role)
    delete_role_parser.add_argument(
        dest="role_name",
        metavar="ROLENAME",
        help="The name of the existing role.")

    # entity acl
    retrieve_entity_acl_parser = subparsers.add_parser(
        "retrieve_entity_acl", help="Retrieve an entity ACL.")
    retrieve_entity_acl_parser.set_defaults(call=do_retrieve_entity_acl)
    retrieve_entity_acl_parser.add_argument(dest="query", metavar="QUERY",
                                            help="A FIND query.")

    # one sub-command per ACL action; the action name is stored in args.action
    for action in ["grant", "deny", "revoke_denial", "revoke_grant"]:
        action_entity_permissions_parser = subparsers.add_parser(
            f"{action}_entity_permissions",
            help=f"{action} entity permissions to one or more Entities.")
        action_entity_permissions_parser.set_defaults(
            call=do_action_entity_permissions, action=action)
        action_entity_permissions_parser.add_argument(dest="query", metavar="QUERY",
                                                      help="A FIND query.")
        action_entity_permissions_parser.add_argument(dest="role", metavar="ROLE",
                                                      help="The name of an existing role.")
        action_entity_permissions_parser.add_argument(
            dest="permissions",
            metavar="PERMISSION",
            help="A list of permissions",
            nargs='+')
        action_entity_permissions_parser.add_argument(
            '--priority',
            dest="priority",
            action="store_true",
            default=False,
            help="This flag enables priority permission rules.")

    # Process arguments
    args = parser.parse_args()

    # Subcommands are optional in argparse by default; without this guard a
    # bare invocation would crash with AttributeError on args.call.
    if not hasattr(args, "call"):
        parser.print_help()
        return 2

    auth_token = args.auth_token

    if auth_token is not None:
        db.configure_connection(password_method="auth_token",
                                auth_token=auth_token)
    else:
        db.configure_connection()

    return args.call(args)


if __name__ == "__main__":
    sys.exit(main())
mode 100644 index 0000000000000000000000000000000000000000..e5432dcebff7bd7aef83d2ad0355b34d82fbf331 --- /dev/null +++ b/src/linkahead/utils/plantuml.py @@ -0,0 +1,415 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# + +"""Utilities for work with PlantUML. + +PlantUML (http://plantuml.com) is a converter from a simple +descriptive language to graphviz diagrams. + +To convert the output, you can write it into FILENAME.pu and then +convert it with: + +plantuml FILENAME.pu -> FILENAME.png +""" + +import os +import shutil + +import linkahead as db +from linkahead.common.datatype import is_reference, get_referenced_recordtype + +from typing import List, Optional + +import tempfile + +REFERENCE = "REFERENCE" + + +def get_description(description_str): + """Extract and format a description string from a record type or property. + + Parameters + ---------- + description_str : str + The description string that is going to be formatted. + + Returns + ------- + str + The reformatted description ending in a line break. 
+ """ + words = description_str.split() + lines = [] + lines.append("") + + for w in words: + if len(lines[-1] + w) > 60: + lines.append("") + + if len(lines[-1]) > 0: + lines[-1] += " " + lines[-1] += w + description = "\n".join(lines) + + return description + "\n" + + +class Grouped(object): + def __init__(self, name, parents): + self.name = name + self.parents = parents + + def get_parents(self): + return self.parents + + +def recordtypes_to_plantuml_string(iterable, + add_properties: bool = True, + add_recordtypes: bool = True, + add_legend: bool = True, + no_shadow: bool = False, + style: str = "default"): + """Converts RecordTypes into a string for PlantUML. + + This function obtains an iterable and returns a string which can + be input into PlantUML for a representation of all RecordTypes in + the iterable. + + Current options for style + ------------------------- + + "default" - Standard rectangles with uml class circle and methods section + "salexan" - Round rectangles, hide circle and methods section + + Current limitations + ------------------- + + - It is inherently hard to detect if an element should be rendered + as a class/RecordType or not. Currently it is rendered if + either the "type" attribute is None or + type(element) == RecordType. + - Inheritance of Properties is not rendered nicely at the moment. + + Parameters + ---------- + iterable: iterable of linkahead.Entity + The objects to be rendered with plantuml. + + no_shadow : bool, optional + If true, tell plantuml to use a skin without blurred shadows. + + + Returns + ------- + out : str + The plantuml string for the given container. + """ + + # TODO: This function needs a review of python type hints. 
+ + classes = [el for el in iterable + if isinstance(el, db.RecordType)] + dependencies = {} + inheritances = {} + properties = [p for p in iterable if isinstance(p, db.Property)] + grouped = [g for g in iterable if isinstance(g, Grouped)] + + def _add_properties(c, importance=None): + result = "" + + for p in c.get_properties(): + if importance is None or c.get_properties().get_importance(p) == importance: + if importance is not None and len(result) == 0: + result += ".." + importance.lower() + "..\n" + name = p.name + p_type = p.datatype + + if p_type is None: + # get type from properties + + for p2 in properties: + if p2.name == p.name: + p_type = p2.datatype + + if p_type is None: + # is reference? + + for p2 in classes: + if p2.name == p.name: + p_type = p2 + + if isinstance(p_type, db.Entity): + p_type = p_type.name + dependencies[c].append(p_type) + elif p_type is not None: + for c2 in classes: + if c2.name == p_type or db.LIST(c2.name) == p_type: + dependencies[c].append(c2.name) + result += ' {name} ({type})\n'.format( + name=name, type=p_type) + + return result + + result = "@startuml\n\n" + + if no_shadow: + result += "skinparam shadowing false\n" + + if style == "default": + result += "skinparam classAttributeIconSize 0\n" + elif style == "salexan": + result += """skinparam roundcorner 20\n +skinparam boxpadding 20\n +\n +hide methods\n +hide circle\n +""" + else: + raise ValueError("Unknown style.") + + if add_properties: + result += "package Properties #DDDDDD {\n" + for p in properties: + inheritances[p] = p.get_parents() + dependencies[p] = [] + + result += "class \"{klass}\" << (P,#008800) >> {{\n".format(klass=p.name) + + if p.description is not None: + result += get_description(p.description) + result += "\n..\n" + + if isinstance(p.datatype, str): + result += "datatype: " + p.datatype + "\n" + elif isinstance(p.datatype, db.Entity): + result += "datatype: " + p.datatype.name + "\n" + else: + result += "datatype: " + str(p.datatype) + "\n" + 
result += "}\n\n" + result += "}\n\n" + + if add_recordtypes: + result += "package RecordTypes #DDDDDD {\n" + + for c in classes: + inheritances[c] = c.get_parents() + dependencies[c] = [] + result += "class \"{klass}\" << (C,#FF1111) >> {{\n".format(klass=c.name) + + if c.description is not None: + result += get_description(c.description) + + props = "" + props += _add_properties(c, importance=db.FIX) + props += _add_properties(c, importance=db.OBLIGATORY) + props += _add_properties(c, importance=db.RECOMMENDED) + props += _add_properties(c, importance=db.SUGGESTED) + + if len(props) > 0: + result += "__Properties__\n" + props + else: + result += "\n..\n" + result += "}\n\n" + + for g in grouped: + inheritances[g] = g.get_parents() + result += "class \"{klass}\" << (G,#0000FF) >> {{\n".format(klass=g.name) + result += "}\n\n" + + for c, parents in inheritances.items(): + for par in parents: + result += "\"{par}\" <|-- \"{klass}\"\n".format( + klass=c.name, par=par.name) + + for c, deps in dependencies.items(): + for dep in deps: + result += "\"{klass}\" *-- \"{dep}\"\n".format( + klass=c.name, dep=dep) + + if add_legend: + result += """ + +package \"B is a subtype of A\" <<Rectangle>> { + A <|-right- B + note "This determines what you find when you query for the RecordType.\\n'FIND RECORD A' will provide Records which have a parent\\nA or B, while 'FIND RECORD B' will provide only Records which have a parent B." 
as N1 +} +""" + result += """ + +package \"The property P references an instance of D\" <<Rectangle>> { + class C { + P(D) + } + C *-right- D + note "Employ this when searching for C: 'FIND RECORD C WITH D'\\nOr if the value of D is a Record: 'FIND RECORD C WHICH REFERENCES D' is possible.\\nEmploying this while searching for D: 'FIND RECORD D WHICH IS REFERENCED BY C" as N2 +} + +""" + + result += "\n@enduml\n" + + return result + + +def retrieve_substructure(start_record_types, depth, result_id_set=None, result_container=None, cleanup=True): + """Recursively retrieves LinkAhead record types and properties, starting + from given initial types up to a specific depth. + + Parameters + ---------- + start_record_types : Iterable[db.Entity] + Iterable with the entities to be displayed. Starting from these + entities more entities will be retrieved. + depth : int + The maximum depth up to which to retriev sub entities. + result_id_set : set[int] + Used by recursion. Filled with already visited ids. + result_container : db.Container + Used by recursion. Filled with already visited entities. + cleanup : bool + Used by recursion. If True return the resulting result_container. + Don't return anything otherwise. + + Returns + ------- + db.Container + A container containing all the retrieved entites + or None if cleanup is False. 
+ """ + # Initialize the id set and result container for level zero recursion depth: + if result_id_set is None: + result_id_set = set() + if result_container is None: + result_container = db.Container() + + for entity in start_record_types: + entity.retrieve() + if entity.id not in result_id_set: + result_container.append(entity) + result_id_set.add(entity.id) + for prop in entity.properties: + if (is_reference(prop.datatype) and prop.datatype != db.FILE and depth > 0): + rt = db.RecordType( + name=get_referenced_recordtype(prop.datatype)).retrieve() + retrieve_substructure([rt], depth-1, result_id_set, + result_container, False) + # TODO: clean up this hack + # TODO: make it also work for files + if is_reference(prop.datatype) and prop.value is not None: + r = db.Record(id=prop.value).retrieve() + retrieve_substructure([r], depth-1, result_id_set, result_container, False) + if r.id not in result_id_set: + result_container.append(r) + result_id_set.add(r.id) + + if prop.id not in result_id_set: + result_container.append(prop) + result_id_set.add(prop.id) + + for parent in entity.parents: + rt = db.RecordType(id=parent.id).retrieve() + if parent.id not in result_id_set: + result_container.append(rt) + result_id_set.add(parent.id) + if depth > 0: + retrieve_substructure([rt], depth-1, result_id_set, + result_container, False) + + if cleanup: + return result_container + return None + + +def to_graphics(recordtypes: List[db.Entity], filename: str, + output_dirname: Optional[str] = None, + formats: List[str] = ["tsvg"], + silent: bool = True, + add_properties: bool = True, + add_recordtypes: bool = True, + add_legend: bool = True, + no_shadow: bool = False, + style: str = "default"): + """Calls recordtypes_to_plantuml_string(), saves result to file and + creates an svg image + + plantuml needs to be installed. + + Parameters + ---------- + recordtypes : Iterable[db.Entity] + Iterable with the entities to be displayed. 
+ filename : str + filename of the image without the extension(e.g. data_structure; + also without the preceeding path. + data_structure.pu and data_structure.svg will be created.) + output_dirname : str + the destination directory for the resulting images as defined by the "-o" + option by plantuml + default is to use current working dir + formats : List[str] + list of target formats as defined by the -t"..." options by plantuml, e.g. "tsvg" + silent : bool + Don't output messages. + no_shadow : bool, optional + If true, tell plantuml to use a skin without blurred shadows. + """ + pu = recordtypes_to_plantuml_string(iterable=recordtypes, + add_properties=add_properties, + add_recordtypes=add_recordtypes, + add_legend=add_legend, + no_shadow=no_shadow, + style=style) + + if output_dirname is None: + output_dirname = os.getcwd() + + allowed_formats = [ + "tpng", "tsvg", "teps", "tpdf", "tvdx", "txmi", + "tscxml", "thtml", "ttxt", "tutxt", "tlatex", "tlatex:nopreamble"] + + with tempfile.TemporaryDirectory() as td: + + pu_filename = os.path.join(td, filename + ".pu") + with open(pu_filename, "w") as pu_file: + pu_file.write(pu) + + for format in formats: + extension = format[1:] + if ":" in extension: + extension = extension[:extension.index(":")] + + if format not in allowed_formats: + raise RuntimeError("Format not allowed.") + cmd = "plantuml -{} {}".format(format, pu_filename) + if not silent: + print("Executing:", cmd) + + if os.system(cmd) != 0: # TODO: replace with subprocess.run + raise Exception("An error occured during the execution of " + "plantuml when using the format {}. " + "Is plantuml installed? " + "You might want to dry a different format.".format(format)) + # copy only the final product into the target directory + shutil.copy(os.path.join(td, filename + "." 
+ extension), + output_dirname) diff --git a/src/linkahead/utils/register_tests.py b/src/linkahead/utils/register_tests.py new file mode 100644 index 0000000000000000000000000000000000000000..6909544fed5a6f80572f60ba102c72b53568d897 --- /dev/null +++ b/src/linkahead/utils/register_tests.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python +# encoding: utf-8 +# +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2022 Alexander Schlemmer <alexander.schlemmer@ds.mpg.de> +# Copyright (C) 2022 Timm Fitschen <t.fitschen@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. + +import linkahead as db +from linkahead import administration as admin + +""" +This module implements a registration procedure for integration tests which +need a running LinkAhead instance. + +It ensures that tests do not accidentally overwrite data in real LinkAhead +instances, as it checks whether the running LinkAhead instance is actually the +correct one, that +should be used for these tests. + +The test files have to define a global variable TEST_KEY which must be unique +for each test using + +set_test_key("ABCDE") + +The test procedure (invoked by pytest) checks whether a registration +information is stored in one of the server properties or otherwise +- offers to register this test in the currently running database ONLY if this + is empty. 
+- fails otherwise with a RuntimeError + +NOTE: you probably need to use pytest with the -s option to be able to + register the test interactively. Otherwise, the server property has to be + set before server start-up in the server.conf of the LinkAhead server. + +This module is intended to be used with pytest. + +There is a pytest fixture "clear_database" that performs the above mentioned +checks and clears the database in case of success. +""" + +TEST_KEY = None + + +def set_test_key(KEY): + global TEST_KEY + TEST_KEY = KEY + + +def _register_test(): + res = db.execute_query("COUNT Entity") + if not isinstance(res, int): + raise RuntimeError("Response from server for Info could not be interpreted.") + if res > 0: + raise RuntimeError("This instance of LinkAhead contains entities already." + "It must be empty in order to register a new test.") + + print("Current host of LinkAhead instance is: {}".format( + db.connection.connection.get_connection()._delegate_connection.setup_fields["host"])) + answer = input("This method will register your current test with key {} with the currently" + " running instance of LinkAhead. 
Do you want to continue (y/N)?".format( + TEST_KEY)) + if answer != "y": + raise RuntimeError("Test registration aborted by user.") + + admin.set_server_property("_CAOSDB_INTEGRATION_TEST_SUITE_KEY", + TEST_KEY) + + +def _get_registered_test_key(): + try: + return admin.get_server_property("_CAOSDB_INTEGRATION_TEST_SUITE_KEY") + except KeyError: + return None + + +def _is_registered(): + registered_test_key = _get_registered_test_key() + if not registered_test_key: + return False + elif registered_test_key == TEST_KEY: + return True + else: + raise RuntimeError("The database has been setup for a different test.") + + +def _assure_test_is_registered(): + global TEST_KEY + if TEST_KEY is None: + raise RuntimeError("TEST_KEY is not defined.") + if not _is_registered(): + answer = input("Do you want to register this instance of LinkAhead" + " with the current test? Do you want to continue (y/N)?") + if answer == "y": + _register_test() + raise RuntimeError("Test has been registered. Please rerun tests.") + else: + raise RuntimeError("The database has not been setup for this test.") + + +def _clear_database(): + c = db.execute_query("FIND ENTITY WITH ID>99") + c.delete(raise_exception_on_error=False) + return None + + +try: + import pytest + + @pytest.fixture + def clear_database(): + """Remove Records, RecordTypes, Properties, and Files ONLY IF the LinkAhead + server the current connection points to was registered with the appropriate key. + + PyTestInfo Records and the corresponding RecordType and Property are preserved. 
+ """ + _assure_test_is_registered() + yield _clear_database() # called before the test function + _clear_database() # called after the test function +except ImportError: + raise Warning("""The register_tests module depends on pytest and is + intended to be used in integration test suites for the + linkahead-pylib library only.""") diff --git a/src/linkahead/utils/server_side_scripting.py b/src/linkahead/utils/server_side_scripting.py new file mode 100644 index 0000000000000000000000000000000000000000..06caa3d94a629e368dc99f83dc2957c756b7b487 --- /dev/null +++ b/src/linkahead/utils/server_side_scripting.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> +# Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# +"""server_side_scripting + +Helper functions for calling server-side scripts. +""" +from urllib.parse import quote +from lxml import etree + +from linkahead.connection.connection import get_connection +from linkahead.connection.utils import urlencode +from linkahead.connection.encode import (MultipartParam, multipart_encode, + ReadableMultiparts) + + +def _make_params(pos_args, opts): + """Create and return option string components. 
+
+The return value is a dict that will be something like `-O<key>`:`<value>` from `opts` and
+`-p{0,1,2,3,...}`:`<value>` from `pos_args`.
+
+    """
+    result = {}
+    for key, val in opts.items():
+        result["-O{key}".format(key=key)] = str(val)
+    for i, val in enumerate(pos_args):
+        result["-p{i}".format(i=i)] = str(val)
+    return result
+
+
+def _make_multipart_request(call, pos_args, opts, files):
+    """Return body and header for an HTTP request.
+    """
+    parts = list()
+    params = _make_params(pos_args, opts)
+
+    parts.append(MultipartParam("call", call))
+    for key, val in params.items():
+        parts.append(MultipartParam(key, val))
+
+    for paramname, filename in files.items():
+        parts.append(MultipartParam.from_file(paramname=paramname,
+                                              filename=filename))
+
+    body, headers = multipart_encode(parts)
+    body = ReadableMultiparts(body)
+    return body, headers
+
+
+def _make_form_request(call, pos_args, opts):
+    """Return URL from call and arguments, and headers for urlencoding."""
+    form = dict()
+    form["call"] = call
+
+    params = _make_params(pos_args, opts)
+    for key, val in params.items():
+        form[key] = val
+
+    headers = {}
+    headers["Content-Type"] = "application/x-www-form-urlencoded"
+    return urlencode(form), headers
+
+
+def _make_request(call, pos_args, opts, files=None):
+    """
+    Multipart if with files, otherwise url-encoded. 
+ + Return + ------ + path_segments, body, headers + """ + + if files is not None: + return _make_multipart_request(call, pos_args, opts, files) + + return _make_form_request(call, pos_args, opts) + + +def run_server_side_script(call, *args, files=None, **kwargs): + """ + + Return + ------ + response : ScriptingResponse + """ + body, headers = _make_request(call=call, pos_args=args, + opts=kwargs, files=files) + response = get_connection()._http_request(method="POST", + path=quote("scripting"), + body=body, + headers=headers) + xml = etree.parse(response) + code = int(xml.xpath("/Response/script/@code")[0]) + call = xml.xpath("/Response/script/call")[0].text + stdout = xml.xpath("/Response/script/stdout")[0].text + stderr = xml.xpath("/Response/script/stderr")[0].text + + return ScriptingResponse(call=call, + code=code, + stdout=stdout, + stderr=stderr) + + +class ScriptingResponse(): + """ScriptingResponse + + A data class for the response of server-side scripting calls. + + Properties + ---------- + code : int + The return code of the script process. + call : str + The complete call of the script minus the absolute path and the + auth_token. + stdout : str + The STDOUT of the script process. + stderr : str + The STDERR of the script process. + + """ + + def __init__(self, call, code, stdout, stderr): + self.call = call + self.code = code + self.stdout = stdout + self.stderr = stderr diff --git a/src/linkahead/yamlapi.py b/src/linkahead/yamlapi.py new file mode 100644 index 0000000000000000000000000000000000000000..8b5eda55460ae085de11c6bc2507dd98e578d682 --- /dev/null +++ b/src/linkahead/yamlapi.py @@ -0,0 +1,169 @@ +# -*- coding: utf-8 -*- +# +# ** header v3.0 +# This file is a part of the LinkAhead Project. 
+# +# Copyright (C) 2018 Research Group Biomedical Physics, +# Max-Planck-Institute for Dynamics and Self-Organization Göttingen +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +# +# ** end header +# + +"""!!! Deprecated !!! YAML interface for the database (LinkAhead)""" + +import yaml +from lxml import etree +from lxml.etree import Element +import re +import linkahead +import linkahead.common.utils as utils +from linkahead.connection.connection import get_connection +import warnings + + +def append_sublist(v, newel, def_entity_type): + warnings.warn(""" + This function is deprecated and will be removed with the next release. + Please use linkahead-advanced-user-tools/models/data_model.py for a + similar functionality.""", DeprecationWarning) + if v is None: + return + for i in v: + if isinstance(i, dict): + entity_type = def_entity_type + if "entity" in i: + entity_type = i["entity"] + del i["entity"] + newel.append(kv_to_xml(entity_type, i)) + + +def kv_to_xml(k, v): + warnings.warn(""" + This function is deprecated and will be removed with the next release. 
+ Please use linkahead-advanced-user-tools/models/data_model.py for a + similar functionality.""", DeprecationWarning) + newel = Element(k) + # code.interact(local=locals()) + if isinstance(v, list): # Top level loop + append_sublist(v, newel, "Property") + elif isinstance(v, dict): + if "value" in v: + newel.text = v["value"] + del v["value"] + subdict = {"properties": "Property", "parents": "Parent"} + for jk, jv in subdict.items(): + if jk in v: + append_sublist(v[jk], newel, jv) + del v[jk] + for k2, v2 in v.items(): + newel.set(k2, str(v2)) + return newel + + +def dict_to_xml(d): + """ + d: The dictionary (possibly loaded from yaml) + to convert to linkahead-xml. + """ + warnings.warn(""" + This function is deprecated and will be removed with the next release. + Please use linkahead-advanced-user-tools/models/data_model.py for a + similar functionality.""", DeprecationWarning) + return kv_to_xml("Entities", d) + + +def yaml_to_xml(yamlstr): + warnings.warn(""" + This function is deprecated and will be removed with the next release. + Please use linkahead-advanced-user-tools/models/data_model.py for a + similar functionality.""", DeprecationWarning) + """Load a yaml document from yamlstr and converts it to XML. + + Parameters + ---------- + yamlstr : str + The string to load the yaml document from. + + """ + return dict_to_xml(yaml.load(yamlstr, Loader=yaml.SafeLoader)) + + +def process(text): + """Do some replacements on the original file to obtain valid yaml.""" + warnings.warn(""" + This function is deprecated and will be removed with the next release. 
+ Please use linkahead-advanced-user-tools/models/data_model.py for a + similar functionality.""", DeprecationWarning) + processed = re.sub( + "^(\\s*)-\\s*\\{?(.*)\\}?\\s*$", + "\\1- {\\2}", + text, + flags=re.MULTILINE) + processed = re.sub("^(\\s*)\\+\\s*(.*)\\s*$", "\\1- \\2", + processed, flags=re.MULTILINE) + print(processed) + return processed + + +def yaml_file_to_xml(yamlfilename): + warnings.warn(""" + This function is deprecated and will be removed with the next release. + Please use linkahead-advanced-user-tools/models/data_model.py for a + similar functionality.""", DeprecationWarning) + with open(yamlfilename, "r") as f: + return yaml_to_xml(process(f.read())) + + +def insert_yaml_file(yamlfilename, simulate=False): + """Inserts the contents of 'yamlfilename' into the database. + + Set 'simulate' to True if you don't actually want to insert the xml, + but only receive what would be sent. + """ + warnings.warn(""" + This function is deprecated and will be removed with the next release. 
+ Please use linkahead-advanced-user-tools/models/data_model.py for a + similar functionality.""", DeprecationWarning) + con = get_connection() + prs = etree.XMLParser(remove_blank_text=True) + sent_xml = etree.tostring( + etree.fromstring( + etree.tostring( + yaml_file_to_xml(yamlfilename)), + prs), + pretty_print=True) + if simulate: + return "", sent_xml.decode("utf-8") + response = con.insert(entity_uri_segment="Entity/", + body=sent_xml) + resp_text = response.readall() + resp_elem = etree.fromstring(resp_text, prs) + for i in resp_elem.iter("Error"): + print("ERROR: " + i.get("description")) + child = i.getparent() + while child is not None: + childname = "" + childid = "" + # print(etree.tostring(child)) + if child.get("name") is not None: + childname = child.get("name") + if child.get("id") is not None: + childid = child.get("id") + print(" in " + child.tag + " " + childname + " " + childid) + child = child.getparent() + return etree.tostring(resp_elem, + pretty_print=True).decode( + "utf-8"), sent_xml.decode("utf-8") diff --git a/unittests/broken_configs/pycaosdb1.ini b/unittests/broken_configs/pylinkahead1.ini similarity index 100% rename from unittests/broken_configs/pycaosdb1.ini rename to unittests/broken_configs/pylinkahead1.ini diff --git a/unittests/broken_configs/pycaosdb2.ini b/unittests/broken_configs/pylinkahead2.ini similarity index 100% rename from unittests/broken_configs/pycaosdb2.ini rename to unittests/broken_configs/pylinkahead2.ini diff --git a/unittests/broken_configs/pycaosdb3.ini b/unittests/broken_configs/pylinkahead3.ini similarity index 100% rename from unittests/broken_configs/pycaosdb3.ini rename to unittests/broken_configs/pylinkahead3.ini diff --git a/unittests/broken_configs/pycaosdb4.ini b/unittests/broken_configs/pylinkahead4.ini similarity index 100% rename from unittests/broken_configs/pycaosdb4.ini rename to unittests/broken_configs/pylinkahead4.ini diff --git a/unittests/docker/Dockerfile b/unittests/docker/Dockerfile 
index 7c84050b0a55ae6e1e8f2e2583f894a69f691193..a5f355fe23d00449ea470fa80f81a4e8e1914242 100644 --- a/unittests/docker/Dockerfile +++ b/unittests/docker/Dockerfile @@ -9,6 +9,7 @@ RUN apt-get update && \ curl pycodestyle \ python3-sphinx ARG COMMIT="dev" -RUN git clone -b dev https://gitlab.indiscale.com/caosdb/src/caosdb-pylib.git && \ - cd caosdb-pylib && git checkout $COMMIT && pip3 install . +# TODO Rename to linkahead +RUN git clone -b dev https://gitlab.indiscale.com/caosdb/src/linkahead-pylib.git linkahead-pylib && \ + cd linkahead-pylib && git checkout $COMMIT && pip3 install . RUN pip3 install recommonmark sphinx-rtd-theme diff --git a/unittests/test_acl.py b/unittests/test_acl.py index 633c25ad5c4046c0fa41b66049bdf56aa695f482..c004979fd4fc437bdca5a3d037417e5f47f45b42 100644 --- a/unittests/test_acl.py +++ b/unittests/test_acl.py @@ -1,6 +1,6 @@ # -*- encoding: utf-8 -*- # -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2022 Indiscale GmbH <info@indiscale.com> # Copyright (C) 2022 Timm Fitschen <f.fitschen@indiscale.com> @@ -18,7 +18,7 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <https://www.gnu.org/licenses/>. # -import caosdb as db +import linkahead as db from lxml import etree diff --git a/unittests/test_add_property.py b/unittests/test_add_property.py index 0d3183b4c0ca5517ecea68d0e49bbf335bb2a13e..2989adec5ded5dd227b21771a0d723c45952782a 100644 --- a/unittests/test_add_property.py +++ b/unittests/test_add_property.py @@ -1,7 +1,7 @@ # -*- encoding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -24,7 +24,7 @@ # ** end header # from pytest import raises -import caosdb as db +import linkahead as db def test_no_parameter(): diff --git a/unittests/test_administraction.py b/unittests/test_administraction.py index 25a7d0de7d2a591135ddf21530f23ad532101c53..7f40aa9437b327e140ab0de3a3438522a31b200c 100644 --- a/unittests/test_administraction.py +++ b/unittests/test_administraction.py @@ -1,7 +1,7 @@ # -*- encoding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -25,8 +25,8 @@ # pylint: disable=missing-docstring from __future__ import unicode_literals from pytest import raises -from caosdb import administration, configure_connection, get_connection -from caosdb.connection.mockup import MockUpServerConnection, MockUpResponse +from linkahead import administration, configure_connection, get_connection +from linkahead.connection.mockup import MockUpServerConnection, MockUpResponse def setup_module(): diff --git a/unittests/test_apiutils.py b/unittests/test_apiutils.py index bda381cf6427377194e272dfa14b83399b6f012f..bb6f978bb83c4ee32cc485f538b8807c8f7012dd 100644 --- a/unittests/test_apiutils.py +++ b/unittests/test_apiutils.py @@ -1,5 +1,5 @@ # -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> # Copyright (C) 2022 Florian Spreckelsen <f.spreckelsen@indiscale.com> @@ -27,13 +27,13 @@ import pytest -import caosdb as db -import caosdb.apiutils -from caosdb.apiutils import (apply_to_ids, compare_entities, create_id_query, - empty_diff, EntityMergeConflictError, - resolve_reference, merge_entities) +import linkahead as db +import linkahead.apiutils +from linkahead.apiutils import (apply_to_ids, compare_entities, create_id_query, + empty_diff, EntityMergeConflictError, + resolve_reference, merge_entities) -from caosdb.common.models import SPECIAL_ATTRIBUTES +from linkahead.common.models import SPECIAL_ATTRIBUTES def test_apply_to_ids(): @@ -60,8 +60,8 @@ def test_id_query(): def test_resolve_reference(): - original_retrieve_entity_with_id = caosdb.apiutils.retrieve_entity_with_id - caosdb.apiutils.retrieve_entity_with_id = lambda eid: db.Record(id=eid) + original_retrieve_entity_with_id = linkahead.apiutils.retrieve_entity_with_id + linkahead.apiutils.retrieve_entity_with_id = lambda eid: db.Record(id=eid) prop = db.Property(id=1, datatype=db.REFERENCE, value=100) prop.is_valid = lambda: True @@ -93,7 +93,7 @@ def test_resolve_reference(): assert no_reference.datatype is db.INTEGER # restore retrive_entity_with_id - caosdb.apiutils.retrieve_entity_with_id = original_retrieve_entity_with_id + linkahead.apiutils.retrieve_entity_with_id = original_retrieve_entity_with_id def test_compare_entities(): diff --git a/unittests/test_authentication_auth_token.py b/unittests/test_authentication_auth_token.py index d0eb6b90883951af584d42a80e319c14891f6e50..3142f1f9f54230cb19666eeb8ff5809a906f9d49 100644 --- a/unittests/test_authentication_auth_token.py +++ b/unittests/test_authentication_auth_token.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> # Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> @@ -23,17 +23,17 @@ # """test_authentication_auth_token -Unit tests for the module caosdb.connection.authentication.auth_token +Unit tests for the module linkahead.connection.authentication.auth_token """ from __future__ import unicode_literals from pytest import raises from unittest.mock import Mock -from caosdb.connection.authentication import auth_token as at -from caosdb.connection.mockup import MockUpServerConnection, MockUpResponse -from caosdb.connection.utils import parse_auth_token -from caosdb.exceptions import LoginFailedError -from caosdb import configure_connection +from linkahead.connection.authentication import auth_token as at +from linkahead.connection.mockup import MockUpServerConnection, MockUpResponse +from linkahead.connection.utils import parse_auth_token +from linkahead.exceptions import LoginFailedError +from linkahead import configure_connection def test_get_authentication_provider(): diff --git a/unittests/test_authentication_external.py b/unittests/test_authentication_external.py index a8fc6f79578c9812dab52dd1fa3807f62fd710fb..66a584ca4b2e6a2ee49bf1f16ae25ff26983fdd6 100644 --- a/unittests/test_authentication_external.py +++ b/unittests/test_authentication_external.py @@ -1,7 +1,7 @@ #! -*- coding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -29,7 +29,7 @@ Tests for the external_credentials_provider modul. 
from __future__ import unicode_literals import logging from pytest import raises -from caosdb.connection.authentication import ( +from linkahead.connection.authentication import ( external_credentials_provider as ecp ) diff --git a/unittests/test_authentication_keyring.py b/unittests/test_authentication_keyring.py index 715514498b406478a3bddcc64c7794316f64368e..95f0ad1f61072297a7eacf2e6601ea8ff83e8bbf 100644 --- a/unittests/test_authentication_keyring.py +++ b/unittests/test_authentication_keyring.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -23,11 +23,11 @@ # """test_authentication_keyring. -Tests for the caosdb.connection.authentication.keyring module. +Tests for the linkahead.connection.authentication.keyring module. """ import sys from pytest import raises -from caosdb.connection.authentication.keyring import KeyringCaller +from linkahead.connection.authentication.keyring import KeyringCaller def test_initialization(): diff --git a/unittests/test_authentication_pass.py b/unittests/test_authentication_pass.py index 45bda08a46bcc95aa73e5609f053f3ac178901dc..782577b26bb5ae0ed3b8c74cc5ac4b21b59562f0 100644 --- a/unittests/test_authentication_pass.py +++ b/unittests/test_authentication_pass.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -23,11 +23,11 @@ # """test_authentication_pass. -Tests for the caosdb.connection.authentication.pass module. +Tests for the linkahead.connection.authentication.pass module. 
""" import sys from pytest import raises -_PASSCALLER = "caosdb.connection.authentication.pass" +_PASSCALLER = "linkahead.connection.authentication.pass" __import__(_PASSCALLER) PassCaller = sys.modules[_PASSCALLER].PassCaller diff --git a/unittests/test_authentication_plain.py b/unittests/test_authentication_plain.py index 146b59889c71c86ea77fb4ae962118cdda1afb06..fe11787633c9f12ccec9ef27b89e446a0ddeb1b5 100644 --- a/unittests/test_authentication_plain.py +++ b/unittests/test_authentication_plain.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -23,12 +23,12 @@ # """test_authentication_plain. -Unit tests for the modul caosdb.connection.authentication.plain. +Unit tests for the modul linkahead.connection.authentication.plain. """ from __future__ import unicode_literals -from caosdb.connection.authentication.plain import PlainTextCredentialsProvider +from linkahead.connection.authentication.plain import PlainTextCredentialsProvider from pytest import raises @@ -67,4 +67,4 @@ def test_plain_has_logger(): assert hasattr(p, "logger") assert "authentication" in p.logger.name assert "connection" in p.logger.name - assert "caosdb" in p.logger.name + assert "linkahead" in p.logger.name diff --git a/unittests/test_authentication_unauthenticated.py b/unittests/test_authentication_unauthenticated.py index 45a709fcc62b609a97de7e87dd6c6f6ac94a55a1..e039dc0bc9cd064ffe1d49d9f4e0de2aa7f7cd61 100644 --- a/unittests/test_authentication_unauthenticated.py +++ b/unittests/test_authentication_unauthenticated.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> # Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> @@ -23,17 +23,17 @@ # """test_authentication_unauthenticated -Unit tests for the module caosdb.connection.authentication.unauthenticated. +Unit tests for the module linkahead.connection.authentication.unauthenticated. """ from __future__ import unicode_literals from pytest import raises from unittest.mock import Mock -from caosdb.connection.authentication import unauthenticated -from caosdb.connection.mockup import MockUpServerConnection, MockUpResponse -from caosdb.connection.utils import parse_auth_token -from caosdb.exceptions import LoginFailedError -from caosdb import configure_connection +from linkahead.connection.authentication import unauthenticated +from linkahead.connection.mockup import MockUpServerConnection, MockUpResponse +from linkahead.connection.utils import parse_auth_token +from linkahead.exceptions import LoginFailedError +from linkahead import configure_connection from .test_authentication_auth_token import response_with_auth_token diff --git a/unittests/test_cached.py b/unittests/test_cached.py index ce302d671d6077aed7d8457e70da2076ebe65d50..29404eea07a0e9e8be270f45ccf2952a4deac735 100644 --- a/unittests/test_cached.py +++ b/unittests/test_cached.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2023 Henrik tom Wörden <h.tomwoerden@indiscale.com> # Copyright (C) 2023 IndiScale GmbH <info@indiscale.com> @@ -19,12 +19,12 @@ # along with this program. If not, see <https://www.gnu.org/licenses/>. 
# -""" Test the caosdb.cached module """ +""" Test the linkahead.cached module """ -from caosdb.cached import (cached_get_entity_by, cache_clear, cache_info, cache_fill, - AccessType, cache_initialize, cached_query) +from linkahead.cached import (cached_get_entity_by, cache_clear, cache_info, cache_fill, + AccessType, cache_initialize, cached_query) from unittest.mock import patch -import caosdb as db +import linkahead as db from copy import deepcopy import pytest @@ -73,7 +73,7 @@ def mocked_gen_query(q, unique): return db.Container().extend(DUMMY_SERVER_CONTENT) -@patch("caosdb.utils.get_entity.get_entity_by_name") +@patch("linkahead.utils.get_entity.get_entity_by_name") def test_get_by_name(mocked_get_by_name): mocked_get_by_name.side_effect = mocked_name_query # first call; not in cache -> mocked_execute is touched @@ -107,7 +107,7 @@ def test_get_by_name(mocked_get_by_name): assert c.id == 103 -@patch("caosdb.utils.get_entity.get_entity_by_id") +@patch("linkahead.utils.get_entity.get_entity_by_id") def test_get_by_id(mocked_get_by_id): mocked_get_by_id.side_effect = mocked_id_query # first call; not in cache -> mocked_execute is touched @@ -142,7 +142,7 @@ def test_get_by_id(mocked_get_by_id): assert c.name == 'c' -@patch("caosdb.cached.get_entity.get_entity_by_path") +@patch("linkahead.cached.get_entity.get_entity_by_path") def test_get_by_path(mocked_get_by_path): mocked_get_by_path.side_effect = mocked_path_query # first call; not in cache -> mocked_execute is touched @@ -176,7 +176,7 @@ def test_get_by_path(mocked_get_by_path): assert c.id == 105 -@patch("caosdb.cached.execute_query") +@patch("linkahead.cached.execute_query") def test_get_by_query(mocked_query): mocked_query.side_effect = mocked_gen_query # test cache initialization @@ -205,7 +205,7 @@ def test_get_by_query(mocked_query): assert cache_info().misses == 2 -@patch("caosdb.cached.execute_query") +@patch("linkahead.cached.execute_query") def test_cached_query(mocked_query): 
mocked_query.side_effect = mocked_gen_query # test cache initialization @@ -243,7 +243,7 @@ def test_cached_query(mocked_query): assert c[0].id == 101 -@patch("caosdb.utils.get_entity.get_entity_by_name") +@patch("linkahead.utils.get_entity.get_entity_by_name") def test_cache_size(mocked_get_by_name): mocked_get_by_name.side_effect = lambda x: x # first call; not in cache -> mocked_execute is touched diff --git a/unittests/test_concrete_property.py b/unittests/test_concrete_property.py index 0e5c28534c7ac404b829df575225f42e908adb01..e70668f02aab12762a342f035a974f708652ae69 100644 --- a/unittests/test_concrete_property.py +++ b/unittests/test_concrete_property.py @@ -1,7 +1,7 @@ # -*- encoding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -24,9 +24,9 @@ """Tests for the _ConcreteProperty class.""" -from caosdb import configure_connection -from caosdb.common.models import _ConcreteProperty -from caosdb.connection.mockup import MockUpServerConnection +from linkahead import configure_connection +from linkahead.common.models import _ConcreteProperty +from linkahead.connection.mockup import MockUpServerConnection # pylint: disable=missing-docstring from nose.tools import assert_equal as eq from nose.tools import assert_is_not_none as there diff --git a/unittests/test_configs/pycaosdb-server-side-scripting.ini b/unittests/test_configs/pycaosdb-server-side-scripting.ini deleted file mode 100644 index de2867f8dc66b3e81f10f35e40c36f9cb8591604..0000000000000000000000000000000000000000 --- a/unittests/test_configs/pycaosdb-server-side-scripting.ini +++ /dev/null @@ -1,9 +0,0 @@ -; this is the pycaosdb.ini for the server-side-scripting home. 
-[Connection] -url = https://caosdb-server:10443 -cacert = /opt/caosdb/cert/caosdb.cert.pem -debug = 0 -timeout = 5000 - -[Misc] -sendmail = /usr/local/bin/sendmail_to_file diff --git a/unittests/test_configs/pycaosdb-IntegrationTests.ini b/unittests/test_configs/pylinkahead-IntegrationTests.ini similarity index 70% rename from unittests/test_configs/pycaosdb-IntegrationTests.ini rename to unittests/test_configs/pylinkahead-IntegrationTests.ini index cb9871708f7f23c489de0cbc8f4fbda15dfa6ad0..0965b4be218703c14bfb2c0091e989af7387051f 100644 --- a/unittests/test_configs/pycaosdb-IntegrationTests.ini +++ b/unittests/test_configs/pylinkahead-IntegrationTests.ini @@ -2,14 +2,14 @@ ## This sections needs to exist in addition to the usual section [IntegrationTests] # test_server_side_scripting.bin_dir.local=/path/to/scripting/bin -test_server_side_scripting.bin_dir.local=/home/myself/test/caosdb-server/scripting/bin -# test_server_side_scripting.bin_dir.server=/opt/caosdb/git/caosdb-server/scripting/bin +test_server_side_scripting.bin_dir.local=/home/myself/test/linkahead-server/scripting/bin +# test_server_side_scripting.bin_dir.server=/opt/linkahead/git/linkahead-server/scripting/bin # # location of the files from the pyinttest perspective # test_files.test_insert_files_in_dir.local=/extroot/test_insert_files_in_dir/ test_files.test_insert_files_in_dir.local=/home/myself/test/debug_advanced/paths/extroot/test_insert_files_in_dir -# # location of the files from the caosdb_servers perspective -test_files.test_insert_files_in_dir.server=/opt/caosdb/mnt/extroot/test_insert_files_in_dir/ +# # location of the files from the LinkAhead server's perspective +test_files.test_insert_files_in_dir.server=/opt/linkahead/mnt/extroot/test_insert_files_in_dir/ ########## Files ################## ## Used by tests of file handling. 
Specify the path to an existing @@ -19,8 +19,8 @@ test_files.test_insert_files_in_dir.server=/opt/caosdb/mnt/extroot/test_insert_f # location of the files from the pyinttest (i.e. host) perspective #test_files.test_insert_files_in_dir.local=/extroot/test_insert_files_in_dir/ -# location of the files from the caosdb server's perspective -#test_files.test_insert_files_in_dir.server=/opt/caosdb/mnt/extroot/test_insert_files_in_dir/ +# location of the files from the LinkAhead server's perspective +#test_files.test_insert_files_in_dir.server=/opt/linkahead/mnt/extroot/test_insert_files_in_dir/ # # location of the one-time tokens from the pyinttest's perspective # test_authentication.admin_token_crud = /authtoken/admin_token_crud.txt diff --git a/unittests/test_configs/pylinkahead-empty.ini b/unittests/test_configs/pylinkahead-empty.ini new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/unittests/test_configs/pycaosdb-indiscale-demo.ini b/unittests/test_configs/pylinkahead-indiscale-demo.ini similarity index 100% rename from unittests/test_configs/pycaosdb-indiscale-demo.ini rename to unittests/test_configs/pylinkahead-indiscale-demo.ini diff --git a/unittests/test_configs/pycaosdb-real-world-1.ini b/unittests/test_configs/pylinkahead-real-world-1.ini similarity index 77% rename from unittests/test_configs/pycaosdb-real-world-1.ini rename to unittests/test_configs/pylinkahead-real-world-1.ini index e524f1d3465c61d89ae4a4dda54536a722f99837..bfdb48bb56de6e834af020d5ee6da082a91050f2 100644 --- a/unittests/test_configs/pycaosdb-real-world-1.ini +++ b/unittests/test_configs/pylinkahead-real-world-1.ini @@ -1,6 +1,6 @@ [Connection] url = https://localhost:10443 -cacert = /opt/caosdb/cert/caosdb.cert.pem +cacert = /opt/linkahead/cert/caosdb.cert.pem debug = 0 timeout = 5000 @@ -10,7 +10,7 @@ entity_loan.curator_mail_from=crawler-test@example.com entity_loan.curator_mail_to=crawler-test@example.com 
[sss_helper] -external_uri = https://caosdb.example.com:443 +external_uri = https://linkahead.example.com:443 [advancedtools] crawler.from_mail=admin@example.com diff --git a/unittests/test_configs/pycaosdb-real-world-2.ini b/unittests/test_configs/pylinkahead-real-world-2.ini similarity index 89% rename from unittests/test_configs/pycaosdb-real-world-2.ini rename to unittests/test_configs/pylinkahead-real-world-2.ini index 5ebd115a4a4de189d22180130acca2a4b78b6daf..b9ebb85a4e114f1e1aeabf74a7e11d6f0c2cdbd2 100644 --- a/unittests/test_configs/pycaosdb-real-world-2.ini +++ b/unittests/test_configs/pylinkahead-real-world-2.ini @@ -1,6 +1,6 @@ [Connection] url = https://samplemanager.example.com:443 -cacert = /opt/caosdb/cert/caosdb.cert.pem +cacert = /opt/linkahead/cert/caosdb.cert.pem debug = 0 timeout = 5000 [Misc] diff --git a/unittests/test_configs/pylinkahead-server-side-scripting.ini b/unittests/test_configs/pylinkahead-server-side-scripting.ini new file mode 100644 index 0000000000000000000000000000000000000000..74dbb3eb97d4e7df90223779481f3632c8fab7a3 --- /dev/null +++ b/unittests/test_configs/pylinkahead-server-side-scripting.ini @@ -0,0 +1,9 @@ +; this is the pylinkahead.ini for the server-side-scripting home. 
+[Connection] +url = https://linkahead-server:10443 +cacert = /opt/linkahead/cert/caosdb.cert.pem +debug = 0 +timeout = 5000 + +[Misc] +sendmail = /usr/local/bin/sendmail_to_file diff --git a/unittests/test_configs/pycaosdb1.ini b/unittests/test_configs/pylinkahead1.ini similarity index 79% rename from unittests/test_configs/pycaosdb1.ini rename to unittests/test_configs/pylinkahead1.ini index dcfa7c21fac735d81ab92b33f0abd31df25fc1ad..bd9b085dcf4e47c3a55c2521d0c4bf05dcd532ef 100644 --- a/unittests/test_configs/pycaosdb1.ini +++ b/unittests/test_configs/pylinkahead1.ini @@ -1,6 +1,6 @@ [Connection] cacert=/very/long/path/to/self/signed/pem/file/caosdb.ca.pem url=https://hostname:8833/playground -password_identifier=SECTION/caosdb +password_identifier=SECTION/linkahead username=username password_method=pass diff --git a/unittests/test_configs/pycaosdb2.ini b/unittests/test_configs/pylinkahead2.ini similarity index 100% rename from unittests/test_configs/pycaosdb2.ini rename to unittests/test_configs/pylinkahead2.ini diff --git a/unittests/test_configs/pycaosdb3.ini b/unittests/test_configs/pylinkahead3.ini similarity index 100% rename from unittests/test_configs/pycaosdb3.ini rename to unittests/test_configs/pylinkahead3.ini diff --git a/unittests/test_configs/pycaosdb4.ini b/unittests/test_configs/pylinkahead4.ini similarity index 100% rename from unittests/test_configs/pycaosdb4.ini rename to unittests/test_configs/pylinkahead4.ini diff --git a/unittests/test_configs/pycaosdb5.ini b/unittests/test_configs/pylinkahead5.ini similarity index 100% rename from unittests/test_configs/pycaosdb5.ini rename to unittests/test_configs/pylinkahead5.ini diff --git a/unittests/test_configs/pycaosdb6.ini b/unittests/test_configs/pylinkahead6.ini similarity index 100% rename from unittests/test_configs/pycaosdb6.ini rename to unittests/test_configs/pylinkahead6.ini diff --git a/unittests/test_configuration.py b/unittests/test_configuration.py index 
b135e7cd65b11be7cb6c4ef2237a41a6639ccbb7..9522e58c735b1c7e0efd6d029fe41c9e4e71088a 100644 --- a/unittests/test_configuration.py +++ b/unittests/test_configuration.py @@ -1,7 +1,7 @@ # -*- encoding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -23,7 +23,7 @@ # import pytest -import caosdb as db +import linkahead as db from os import environ, getcwd, remove from os.path import expanduser, isfile, join from pytest import raises @@ -33,18 +33,18 @@ from pytest import raises def temp_ini_files(): created_temp_ini_cwd = False created_temp_ini_home = False - if not isfile(join(getcwd(), "pycaosdb.ini")): - open("pycaosdb.ini", 'a').close() # create temporary ini file + if not isfile(join(getcwd(), "pylinkahead.ini")): + open("pylinkahead.ini", 'a').close() # create temporary ini file created_temp_ini_cwd = True - if not isfile(expanduser("~/.pycaosdb.ini")): - open(expanduser("~/.pycaosdb.ini"), 'a').close() # create temporary ini file in home directory + if not isfile(expanduser("~/.pylinkahead.ini")): + open(expanduser("~/.pylinkahead.ini"), 'a').close() # create temporary ini file in home directory created_temp_ini_home = True yield 0 if created_temp_ini_cwd: - remove("pycaosdb.ini") + remove("pylinkahead.ini") if created_temp_ini_home: - remove(expanduser("~/.pycaosdb.ini")) - environ["PYCAOSDBINI"] = "~/.pycaosdb.ini" + remove(expanduser("~/.pylinkahead.ini")) + environ["PYCAOSDBINI"] = "~/.pylinkahead.ini" def test_config_ini_via_envvar(temp_ini_files): @@ -57,10 +57,10 @@ def test_config_ini_via_envvar(temp_ini_files): # test wrong configuration file in envvar assert not expanduser(environ["PYCAOSDBINI"]) in db.configuration._read_config_files() # test good configuration file in envvar - environ["PYCAOSDBINI"] = "~/.pycaosdb.ini" - assert 
expanduser("~/.pycaosdb.ini") in db.configuration._read_config_files() + environ["PYCAOSDBINI"] = "~/.pylinkahead.ini" + assert expanduser("~/.pylinkahead.ini") in db.configuration._read_config_files() # test without envvar environ.pop("PYCAOSDBINI") - assert expanduser("~/.pycaosdb.ini") in db.configuration._read_config_files() + assert expanduser("~/.pylinkahead.ini") in db.configuration._read_config_files() # test configuration file in cwd - assert join(getcwd(), "pycaosdb.ini") in db.configuration._read_config_files() + assert join(getcwd(), "pylinkahead.ini") in db.configuration._read_config_files() diff --git a/unittests/test_connection.py b/unittests/test_connection.py index 6cc23d87c5cdcf639709a444849a856a8c70af5f..ca36a71680f8e13ac9114b9ab0bff0b6a96ea4c3 100644 --- a/unittests/test_connection.py +++ b/unittests/test_connection.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -21,24 +21,24 @@ # # ** end header # -"""Test caosdb.connection.""" +"""Test linkahead.connection.""" # pylint: disable=missing-docstring from __future__ import print_function, unicode_literals import re from builtins import bytes, str # pylint: disable=redefined-builtin -from caosdb import execute_query -from caosdb.configuration import _reset_config, get_config -from caosdb.connection.authentication.interface import CredentialsAuthenticator -from caosdb.connection.connection import (CaosDBServerConnection, - _DefaultCaosDBServerConnection, - configure_connection) -from caosdb.connection.mockup import (MockUpResponse, MockUpServerConnection, - _request_log_message) -from caosdb.connection.utils import make_uri_path, quote, urlencode -from caosdb.exceptions import (ConfigurationError, LoginFailedError, - CaosDBConnectionError) +from linkahead import execute_query +from linkahead.configuration import _reset_config, get_config +from linkahead.connection.authentication.interface import CredentialsAuthenticator +from linkahead.connection.connection import (CaosDBServerConnection, + _DefaultCaosDBServerConnection, + configure_connection) +from linkahead.connection.mockup import (MockUpResponse, MockUpServerConnection, + _request_log_message) +from linkahead.connection.utils import make_uri_path, quote, urlencode +from linkahead.exceptions import (ConfigurationError, LoginFailedError, + LinkAheadConnectionError) from nose.tools import assert_equal as eq from nose.tools import assert_false as falz from nose.tools import assert_is_not_none as there @@ -127,11 +127,11 @@ def test_configure_connection(): def test_configure_connection_bad_url(): configure_connection(url="https://localhost:8888") - with raises(CaosDBConnectionError) as exc_info: + with raises(LinkAheadConnectionError) as exc_info: configure_connection(url="ftp://localhost:8888") 
assert exc_info.value.args[0].startswith( "The connection url is expected to be a http or https url") - with raises(CaosDBConnectionError) as exc_info: + with raises(LinkAheadConnectionError) as exc_info: configure_connection(url="localhost:8888") assert exc_info.value.args[0].startswith( "The connection url is expected to be a http or https url") @@ -140,8 +140,8 @@ def test_configure_connection_bad_url(): def test_connection_interface(): with raiz(TypeError) as cm: CaosDBServerConnection() - eq(cm.exception.args[0][:55], - "Can't instantiate abstract class CaosDBServerConnection") + tru(cm.exception.args[0].startswith( + "Can't instantiate abstract class CaosDBServerConnection")) tru(hasattr(CaosDBServerConnection, "request")) tru(hasattr(CaosDBServerConnection.request, "__call__")) diff --git a/unittests/test_connection_utils.py b/unittests/test_connection_utils.py index 3890ae05cfe38b78a5ba0829753420246bdb560d..6a95fffa2f5f3dbfb302e035deee2f24fab9acf5 100644 --- a/unittests/test_connection_utils.py +++ b/unittests/test_connection_utils.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -21,23 +21,23 @@ # # ** end header # -"""Test caosdb.connection.utils.""" +"""Test linkahead.connection.utils.""" # pylint: disable=missing-docstring from __future__ import unicode_literals, print_function from pytest import raises from nose.tools import (assert_equal as eq, assert_raises as raiz, assert_true as tru, assert_is_not_none as there, assert_false as falz) -from caosdb.exceptions import ConfigurationError, LoginFailedError -from caosdb.connection.utils import parse_auth_token, auth_token_to_cookie -from caosdb.connection.connection import ( +from linkahead.exceptions import ConfigurationError, LoginFailedError +from linkahead.connection.utils import parse_auth_token, auth_token_to_cookie +from linkahead.connection.connection import ( configure_connection, CaosDBServerConnection, _DefaultCaosDBServerConnection) -from caosdb.connection.mockup import (MockUpServerConnection, MockUpResponse, - _request_log_message) -from caosdb.configuration import get_config, _reset_config -from caosdb.connection.authentication.interface import CredentialsAuthenticator -from caosdb import execute_query +from linkahead.connection.mockup import (MockUpServerConnection, MockUpResponse, + _request_log_message) +from linkahead.configuration import get_config, _reset_config +from linkahead.connection.authentication.interface import CredentialsAuthenticator +from linkahead import execute_query def setup_module(): diff --git a/unittests/test_container.py b/unittests/test_container.py index 0ac4be44826825aa3302119c8bca08f335ab68d3..113dd6223a9a8cd246b3b2998faa586fbae3da11 100644 --- a/unittests/test_container.py +++ b/unittests/test_container.py @@ -2,7 +2,7 @@ # -*- encoding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> # Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> @@ -25,7 +25,7 @@ """Tests for the Container class.""" from __future__ import absolute_import -import caosdb as db +import linkahead as db def test_get_property_values(): diff --git a/unittests/test_datatype.py b/unittests/test_datatype.py index 9b3c6267fb018e2cd3085dea568d7396c4549ac8..5a5e82cc5bfba9ac46a91b4baf4fe45665049c84 100644 --- a/unittests/test_datatype.py +++ b/unittests/test_datatype.py @@ -1,5 +1,5 @@ # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (c) 2020 IndiScale GmbH # Copyright (c) 2020 Daniel Hornung (d.hornung@indiscale.com) @@ -19,9 +19,9 @@ # # ** end header from pytest import raises -import caosdb as db -from caosdb.common import datatype -from caosdb.common.models import _parse_value +import linkahead as db +from linkahead.common import datatype +from linkahead.common.models import _parse_value def test_list(): diff --git a/unittests/test_entity.py b/unittests/test_entity.py index f2891fda266e1d62139b4cb2667c31b090ca6498..abf82f0a9b557cf9d1d2365e01fedaa4eae0c565 100644 --- a/unittests/test_entity.py +++ b/unittests/test_entity.py @@ -1,7 +1,7 @@ # -*- encoding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -27,9 +27,9 @@ import unittest from lxml import etree import os -from caosdb import (INTEGER, Entity, Property, Record, RecordType, - configure_connection) -from caosdb.connection.mockup import MockUpServerConnection +from linkahead import (INTEGER, Entity, Property, Record, RecordType, + configure_connection) +from linkahead.connection.mockup import MockUpServerConnection UNITTESTDIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/unittests/test_error_handling.py b/unittests/test_error_handling.py index 7f974e7db826d093e335b250953658b08db062cd..3f5241466e9a8f810b581cbb587e17ccf8f123ee 100644 --- a/unittests/test_error_handling.py +++ b/unittests/test_error_handling.py @@ -1,7 +1,7 @@ # -*- encoding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2020 Indiscale GmbH <info@indiscale.com> # Copyright (C) 2020 Florian Spreckelsen <f.spreckelsen@indiscale.com> @@ -26,14 +26,14 @@ be TransactionErrors at first which may have one or more level of children. 
""" -import caosdb as db -from caosdb.common.models import raise_errors -from caosdb.exceptions import (AuthorizationError, - EntityDoesNotExistError, EntityError, - EntityHasNoDatatypeError, - TransactionError, UniqueNamesError, - UnqualifiedParentsError, - UnqualifiedPropertiesError) +import linkahead as db +from linkahead.common.models import raise_errors +from linkahead.exceptions import (AuthorizationError, + EntityDoesNotExistError, EntityError, + EntityHasNoDatatypeError, + TransactionError, UniqueNamesError, + UnqualifiedParentsError, + UnqualifiedPropertiesError) from pytest import raises diff --git a/unittests/test_exception.py b/unittests/test_exception.py new file mode 100644 index 0000000000000000000000000000000000000000..23607f46e1794ff336aa6687403c69f99b851988 --- /dev/null +++ b/unittests/test_exception.py @@ -0,0 +1,60 @@ +# -*- encoding: utf-8 -*- +# +# This file is a part of the LinkAhead Project. +# +# Copyright (C) 2023 Indiscale GmbH <info@indiscale.com> +# Copyright (C) 2023 Henrik tom Wörden <h.tomwoerden@indiscale.com> +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. 
+# + +import warnings + +from caosdb.exceptions import (CaosDBConnectionError, CaosDBException, + LinkAheadConnectionError, LinkAheadException) + +# make sure the deprecation is raised +with warnings.catch_warnings(record=True) as w: + # Cause all warnings to always be triggered. + warnings.simplefilter("always") + CaosDBException('1') + + assert issubclass(w[-1].category, DeprecationWarning) + assert "The name CaosDBException is deprecated" in str(w[-1].message) + CaosDBConnectionError('1') + + assert issubclass(w[-1].category, DeprecationWarning) + assert "The name CaosDBConnectionError is deprecated" in str(w[-1].message) + + +# make sure the deprecated Error still allows to catch exceptions +def raiseCE(): + raise CaosDBException('a') + + +def raiseCCE(): + raise CaosDBConnectionError('a') + + +# Exception must be caught +try: + raiseCE() +except CaosDBException as e: + print(e.msg) + +# Exception must be caught +try: + raiseCCE() +except CaosDBConnectionError as e: + print(e.msg) diff --git a/unittests/test_file.py b/unittests/test_file.py index 3c80af7f362a7cdabe0a9ebc89cd2986d04fe242..dd974cb176ca69e2ffb065b5de185611e528e815 100644 --- a/unittests/test_file.py +++ b/unittests/test_file.py @@ -1,7 +1,7 @@ # -*- encoding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -22,8 +22,8 @@ # ** end header # """Tests for the File class.""" -from caosdb import File, Record, configure_connection -from caosdb.connection.mockup import MockUpServerConnection +from linkahead import File, Record, configure_connection +from linkahead.connection.mockup import MockUpServerConnection # pylint: disable=missing-docstring from nose.tools import assert_equal as eq from nose.tools import assert_is_not_none as there diff --git a/unittests/test_issues.py b/unittests/test_issues.py index 3fb48416511ba654d6f998442319c4ff29ac2956..7472f710cea32c1d76f11e52fe7c3c3617804c3c 100644 --- a/unittests/test_issues.py +++ b/unittests/test_issues.py @@ -1,4 +1,4 @@ -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (c) 2022 IndiScale GmbH # Copyright (c) 2022 Daniel Hornung (d.hornung@indiscale.com) @@ -22,7 +22,7 @@ import os import lxml -import caosdb as db +import linkahead as db from pytest import raises diff --git a/unittests/test_message.py b/unittests/test_message.py index 440e7169501afb0a35acb78df95cefae01bd9426..d54b2daaf43778452adbd1564bbaa459ea0e7ff3 100644 --- a/unittests/test_message.py +++ b/unittests/test_message.py @@ -1,7 +1,7 @@ # encoding: utf-8 # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -23,16 +23,14 @@ # # ** end header # -import caosdb as db from copy import deepcopy - +import linkahead as db import pytest def test_messages_dict_behavior(): - from caosdb.common.models import Message - from caosdb.common.models import Messages + from linkahead.common.models import Message, Messages msgs = Messages() diff --git a/unittests/test_plantuml.py b/unittests/test_plantuml.py index a507c36b2d3a4246205fc7507cb05119c575084c..4605ebcb24a785c4f176e9008c837177eac1e3cc 100644 --- a/unittests/test_plantuml.py +++ b/unittests/test_plantuml.py @@ -2,7 +2,7 @@ # encoding: utf-8 # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2022 Indiscale GmbH <info@indiscale.com> # Copyright (C) 2022 Henrik tom Wörden <h.tomwoerden@indiscale.com> @@ -29,9 +29,9 @@ test plantuml utility import tempfile import pytest -import caosdb as db +import linkahead as db import shutil -from caosdb.utils.plantuml import to_graphics +from linkahead.utils.plantuml import to_graphics @pytest.fixture diff --git a/unittests/test_property.py b/unittests/test_property.py index 84f89b5a959192d7831e1bb3eab3a441912afe7e..0fea6e5111d687d8f1b15ba189ec4f75405b5af2 100644 --- a/unittests/test_property.py +++ b/unittests/test_property.py @@ -1,6 +1,6 @@ # -*- encoding: utf-8 -*- # -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -24,8 +24,8 @@ """Tests for the Property class.""" import os -import caosdb as db -from caosdb import Entity, Property, Record +import linkahead as db +from linkahead import Entity, Property, Record # pylint: disable=missing-docstring from lxml import etree diff --git a/unittests/test_query.py b/unittests/test_query.py index 12622ea486dda717ca1fbc1255510575c5e0c8e6..2c13f4b6488efacf32fc8c0afb8e26e29e0d7bc7 100644 --- a/unittests/test_query.py +++ b/unittests/test_query.py @@ -1,7 +1,7 @@ # -*- encoding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2021 Indiscale GmbH <info@indiscale.com> # Copyright (C) 2021 Timm Fitschen <f.fitschen@indiscale.com> @@ -22,7 +22,7 @@ # ** end header # from lxml import etree -import caosdb as db +import linkahead as db def test_query_parsing(): diff --git a/unittests/test_record.py b/unittests/test_record.py index c08a3eb1605d25ce4a9f142895e50647fe02cc3a..92c0f84a8115b1483b0f860a32009f775378a286 100644 --- a/unittests/test_record.py +++ b/unittests/test_record.py @@ -1,7 +1,7 @@ # -*- encoding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -26,7 +26,7 @@ # """Tests for the Record class.""" # pylint: disable=missing-docstring -from caosdb import Entity, Record, RecordType +from linkahead import Entity, Record, RecordType def test_is_entity(): diff --git a/unittests/test_record_type.py b/unittests/test_record_type.py index f31c56decfc394211940296babc83200a470cc8a..594f9c647997d68cccdcccc56eaab482cd694c74 100644 --- a/unittests/test_record_type.py +++ b/unittests/test_record_type.py @@ -1,7 +1,7 @@ # -*- encoding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -22,8 +22,8 @@ # ** end header # """Tests for the RecordType class.""" -from caosdb import Entity, RecordType, configure_connection -from caosdb.connection.mockup import MockUpServerConnection +from linkahead import Entity, RecordType, configure_connection +from linkahead.connection.mockup import MockUpServerConnection # pylint: disable=missing-docstring from nose.tools import assert_equal as eq from nose.tools import assert_is_not_none as there diff --git a/unittests/test_schema.py b/unittests/test_schema.py index fc3f63a4cbaeadcac3c1cb9be2d861a0688fe4b0..feb84e3b7cd2825ef50d712cfaf089b2c4905b35 100644 --- a/unittests/test_schema.py +++ b/unittests/test_schema.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2022 Indiscale GmbH <info@indiscale.com> # Copyright (C) 2021 Alexander Schlemmer @@ -27,7 +27,7 @@ from jsonschema.exceptions import ValidationError from pytest import raises from glob import glob import os -from caosdb.configuration import config_to_yaml, validate_yaml_schema +from linkahead.configuration import config_to_yaml, validate_yaml_schema from configparser import ConfigParser diff --git a/unittests/test_server_side_scripting.py b/unittests/test_server_side_scripting.py index b699c4482d02972282167eb9683a956097ebc5e9..7749af982113c71be1717646e83813ee34c7cff0 100644 --- a/unittests/test_server_side_scripting.py +++ b/unittests/test_server_side_scripting.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> # Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> @@ -25,9 +25,9 @@ import json from urllib.parse import parse_qs from unittest.mock import Mock -from caosdb.utils import server_side_scripting as sss -from caosdb.connection.mockup import MockUpServerConnection, MockUpResponse -from caosdb import configure_connection +from linkahead.utils import server_side_scripting as sss +from linkahead.connection.mockup import MockUpServerConnection, MockUpResponse +from linkahead import configure_connection _REMOVE_FILES_AFTERWARDS = [] diff --git a/unittests/test_state.py b/unittests/test_state.py index 202c7a02af3db28434406626e5164def46febed7..d2bb08b791e3fc087b6078855667c4b5b3354024 100644 --- a/unittests/test_state.py +++ b/unittests/test_state.py @@ -1,7 +1,7 @@ import pytest -import caosdb as db -from caosdb import State, Transition -from caosdb.common.models import parse_xml, ACL +import linkahead as db +from linkahead import State, Transition +from linkahead.common.models import parse_xml, ACL from lxml import etree diff --git a/unittests/test_utils.py 
b/unittests/test_utils.py index 42d18ba06eb7516bb318de54cb537f548cfe9081..3d8e2896247f66c98f1461c1a1e91baca5f01cb6 100644 --- a/unittests/test_utils.py +++ b/unittests/test_utils.py @@ -1,7 +1,7 @@ # coding: utf-8 # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2018 Research Group Biomedical Physics, # Max-Planck-Institute for Dynamics and Self-Organization Göttingen @@ -21,10 +21,10 @@ # # ** end header # -"""Tests for caosdb.common.utils.""" +"""Tests for linkahead.common.utils.""" from __future__ import unicode_literals from lxml.etree import Element -from caosdb.common.utils import xml2str +from linkahead.common.utils import xml2str def test_xml2str(): diff --git a/unittests/test_versioning.py b/unittests/test_versioning.py index 5047069ca17b573b8b54dcaab984419083d06859..4fbe96d3a9abbeba80a530f4049aefc8d19036e8 100644 --- a/unittests/test_versioning.py +++ b/unittests/test_versioning.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # ** header v3.0 -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. 
# # Copyright (C) 2020 Timm Fitschen <t.fitschen@indiscale.com> # Copyright (C) 2020 IndiScale GmbH <info@indiscale.com> @@ -23,16 +23,16 @@ # from __future__ import absolute_import -from caosdb import Record -from caosdb.common.utils import xml2str -from caosdb.common.versioning import Version +from linkahead import Record +from linkahead.common.utils import xml2str +from linkahead.common.versioning import Version from .test_property import testrecord from lxml import etree def test_constructor(): v = Version(id="1234abcd", date="2020-01-01T20:15:00.000UTC", - username="testuser", realm="CaosDB", is_head=True, + username="testuser", realm="LinkAhead", is_head=True, predecessors=[Version(id="2345abdc", date="2020-01-01T20:00:00.000UTC")], successors=[Version(id="3465abdc", @@ -40,7 +40,7 @@ def test_constructor(): assert v.id == "1234abcd" assert v.date == "2020-01-01T20:15:00.000UTC" assert v.username == "testuser" - assert v.realm == "CaosDB" + assert v.realm == "LinkAhead" assert v.is_head is True assert isinstance(v.predecessors, list) assert isinstance(v.predecessors[0], Version) diff --git a/unittests/test_yamlapi.py b/unittests/test_yamlapi.py index cdb1e0499890ee58d10ff7f102632e104ef60868..027e5750d74cabe7a102eb01415aeb3c2c950f61 100644 --- a/unittests/test_yamlapi.py +++ b/unittests/test_yamlapi.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# This file is a part of the CaosDB Project. +# This file is a part of the LinkAhead Project. # # Copyright (C) 2021 IndiScale GmbH <info@indiscale.com> # Copyright (C) 2021 Alexander Kreft <akreft@trineo.org> @@ -22,9 +22,9 @@ import os import warnings import tempfile -from caosdb.yamlapi import (append_sublist, kv_to_xml, - dict_to_xml, yaml_to_xml, - process, yaml_file_to_xml) +from linkahead.yamlapi import (append_sublist, kv_to_xml, + dict_to_xml, yaml_to_xml, + process, yaml_file_to_xml) with warnings.catch_warnings(record=True) as w: # Cause all warnings to always be triggered.