Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • caosdb/src/caosdb-advanced-user-tools
1 result
Show changes
Commits on Source (5)
Showing
with 666 additions and 14 deletions
......@@ -2,6 +2,8 @@ FROM debian:10
RUN apt-get update && \
apt-get install \
curl \
libhdf5-dev \
pkgconf \
python3 \
python3-pip \
python3-requests \
......
......@@ -58,8 +58,8 @@ test:
- cd .docker
- /bin/sh ./run.sh
- cd ..
- docker logs docker_caosdb-server_1 &> ../caosdb_log.txt
- docker logs docker_sqldb_1 &> ../mariadb_log.txt
- docker logs docker_caosdb-server_1 &> caosdb_log.txt
- docker logs docker_sqldb_1 &> mariadb_log.txt
- docker-compose -f .docker/docker-compose.yml down
- rc=`cat .docker/result`
- exit $rc
......
......@@ -27,6 +27,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Automated documentation builds: `make doc`
- Crawler documentation
- Proof-of-concept integration with Bloxberg.
- Introduce a cfood that can create a Record structure based on the contents of a hdf5 file
### Changed ###
......
......@@ -21,7 +21,7 @@
# This Makefile is a wrapper for several other scripts.
.PHONY: help doc install
.PHONY: help doc install unittest
help:
@echo 'Type `make doc` for documentation, or `make install` for (local) installation.'
......@@ -30,4 +30,7 @@ doc:
$(MAKE) -C src/doc html
install:
@echo "Not implemented yet, use pip for installation."
pip3 install .
unittest:
pytest-3 unittests
......@@ -36,6 +36,8 @@ from caosadvancedtools.scifolder import (AnalysisCFood, ExperimentCFood,
PublicationCFood, SimulationCFood,
SoftwareCFood)
from example_hdf5cfood import ExampleH5CFood
try:
from sss_helper import get_argument_parser, print_success
except ModuleNotFoundError:
......@@ -89,6 +91,7 @@ if __name__ == "__main__":
interactive=False, hideKnown=False,
cfood_types=[ExperimentCFood, AnalysisCFood, SoftwareCFood,
PublicationCFood, SimulationCFood,
ExampleH5CFood
])
if args.authorize_run:
......
#!/usr/bin/env python3
# encoding: utf-8
#
# ** header v3.0
# This file is a part of the CaosDB Project.
#
# Copyright (C) 2021 IndiScale GmbH <www.indiscale.com>
# Copyright (C) 2021 Henrik tom Wörden <h.tomwoerden@indiscale.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ** end header
#
"""
An exemplary definition of a HDF5 CFood for integration testing
"""
import caosdb as db
from caosadvancedtools.cfoods.h5 import H5CFood
from caosadvancedtools.scifolder import ExperimentCFood
from caosadvancedtools.scifolder.generic_pattern import readme_pattern
class ExampleH5CFood(H5CFood):
    """Example HDF5 CFood used for integration testing.

    The root Record of the created structure carries the parent
    ``ExampleH5`` and references the crawled file through the
    ``hdf5File`` property.
    """

    # RecordType name used for the root node of the HDF5 structure.
    root_name = "ExampleH5"

    @staticmethod
    def get_re():
        """Match experiment paths whose file name ends in ``.hdf5``."""
        experiment_prefix = ExperimentCFood.get_re()[:-len(readme_pattern)]
        return experiment_prefix + r".*\.hdf5"

    def create_identifiables(self):
        """Create identifiables and add a manually built root Record."""
        super().create_identifiables()
        root = db.Record()
        root.add_property("hdf5File", self.crawled_file)
        root.add_parent("ExampleH5")
        self.identifiable_root = root
        self.identifiables.append(root)

    def special_treatment(self, key, value, dtype):
        """Rename the ``attr_data_root`` attribute to ``single_attribute``."""
        if key == "attr_data_root":
            key = "single_attribute"
        return key, value, dtype
#!/usr/bin/env python3
import caosdb as db
import h5py
from caosadvancedtools.cfoods.h5 import H5CFood
from caosadvancedtools.models.data_model import DataModel
from caosadvancedtools.models.parser import parse_model_from_yaml
model = parse_model_from_yaml("model.yml")
......@@ -9,3 +12,11 @@ if len(db.execute_query("FIND Property alias")) == 0:
al = db.Property(name="alias")
al.add_parent(name="name")
al.insert()
h5model = db.Container()
h5file = h5py.File('extroot/ExperimentalData/2010_TestProject/2019-02-03/hdf5_dummy_file.hdf5', 'r')
H5CFood.create_structure(h5file, create_recordTypes=True, collection=h5model)
h5model[0].name = "ExampleH5"
print(h5model)
h5model = DataModel(h5model)
h5model.sync_data_model(noquestion=True)
......@@ -40,6 +40,8 @@ sources:
datatype: REFERENCE
scripts:
datatype: REFERENCE
single_attribute:
datatype: LIST(INTEGER)
Simulation:
obligatory_properties:
date:
......@@ -66,3 +68,5 @@ Presentation:
Report:
inherit_from_suggested:
- Publication
hdf5File:
datatype: REFERENCE
......@@ -26,6 +26,7 @@ import os
import unittest
import caosdb as db
from caosdb.apiutils import retrieve_entity_with_id
def get_entity_with_id(eid):
......@@ -486,3 +487,17 @@ class CrawlerTest(unittest.TestCase):
# Should have a description
self.assertIsNotNone(ana.description)
def test_exampleh5(self):
    # The crawler should have created exactly one ExampleH5 record from the
    # dummy hdf5 file; `unique=True` raises if that is not the case.
    examp = db.execute_query("FIND Record ExampleH5", unique=True)

    for prop in examp.properties:
        # Each group-reference property must point at a record that carries
        # the child properties of the corresponding hdf5 group (and not the
        # properties of its parent group).
        if prop.name == 'group_level1_a':
            self.assertTrue(retrieve_entity_with_id(prop.value).get_property("group_level2_aa") is not None)
            self.assertTrue(retrieve_entity_with_id(prop.value).get_property("group_level1_a") is None)
        elif prop.name == 'group_level1_b':
            self.assertTrue(retrieve_entity_with_id(prop.value).get_property("level1_b_floats") is not None)
        elif prop.name == 'group_level1_c':
            self.assertTrue(retrieve_entity_with_id(prop.value).get_property("level1_c_floats") is not None)
        elif prop.name == 'root_integers':
            # attr_data_root is renamed to single_attribute by the cfood's
            # special_treatment hook.
            self.assertTrue(retrieve_entity_with_id(prop.value).get_property("single_attribute") is not None)
......@@ -157,12 +157,14 @@ def setup_package():
install_requires=["caosdb>=0.4.0",
"openpyxl>=3.0.0",
"pandas>=1.2.0",
"numpy>=1.17.3",
"xlrd>=2.0",
"h5py",
],
packages=find_packages('src'),
package_dir={'': 'src'},
setup_requires=["pytest-runner>=2.0,<3dev"],
tests_require=["pytest", "pytest-cov", "coverage>=4.4.2"],
tests_require=["pytest", "pytest-pythonpath", "pytest-cov", "coverage>=4.4.2"],
)
try:
setup(**metadata)
......
......@@ -32,6 +32,8 @@ from hashlib import sha256
import caosdb as db
from lxml import etree
import tempfile
def put_in_container(stuff):
if isinstance(stuff, list):
......@@ -154,7 +156,9 @@ class UpdateCache(Cache):
def __init__(self, db_file=None):
if db_file is None:
db_file = "/tmp/crawler_update_cache.db"
tmppath = tempfile.gettempdir()
tmpf = os.path.join(tmppath, "crawler_update_cache.db")
db_file = tmpf
super().__init__(db_file=db_file)
@staticmethod
......
......@@ -298,7 +298,7 @@ class AbstractFileCFood(AbstractCFood):
super().__init__(*args, item=crawled_path, **kwargs)
self._crawled_file = None
self.crawled_path = crawled_path
self.match = re.match(type(self).get_re(), crawled_path)
self.match = re.match(self.get_re(), crawled_path)
self.attached_filenames = []
@property
......@@ -309,7 +309,31 @@ class AbstractFileCFood(AbstractCFood):
return self._crawled_file
@staticmethod
def get_re():
def re_from_extensions(extensions):
"""Return a regular expression which matches the given file extensions.
Useful for inheriting classes.
Parameters
----------
extensions : iterable<str>
An iterable with the allowed extensions.
Returns
-------
out : str
The regular expression, starting with ``.*\\.`` and ending with the EOL dollar
character. The actual extension will be accessible in the
:py:attribute:`pattern group name<python:re.Pattern.groupindexe>` ``ext``.
"""
if not extensions:
return None
return r".*\.(?P<ext>" + "|".join(extensions) + ")$"
@classmethod
def get_re(cls):
""" Returns the regular expression used to identify files that shall be
processed
......@@ -377,6 +401,7 @@ def assure_object_is_in_list(obj, containing_object, property_name,
if containing_object.get_property(property_name) is None:
containing_object.add_property(property_name, value=[],
datatype=datatype)
# TODO: case where multiple times the same property exists is not treated
if not isinstance(containing_object.get_property(property_name).value, list):
containing_object.get_property(property_name).value = [
......@@ -628,7 +653,12 @@ def assure_has_property(entity, name, value, to_be_updated=None,
value = value.id
for el in possible_properties:
if el.value == value:
tmp_value = el.value
if isinstance(tmp_value, db.Entity):
tmp_value = el.value.id
if tmp_value == value:
contained = True
break
......
#!/usr/bin/env python3
# This file is a part of the CaosDB Project.
#
# Copyright (C) 2020 IndiScale GmbH <www.indiscale.com>
# Copyright (C) 2020 Daniel Hornung <d.hornung@indiscale.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""Specialized CFoods."""
#!/usr/bin/env python3
# This file is a part of the CaosDB Project.
#
# Copyright (C) 2020,2021 IndiScale GmbH <www.indiscale.com>
# Copyright (C) 2020 Daniel Hornung <d.hornung@indiscale.com>
# Copyright (C) 2021 Henrik tom Wörden <h.tomwoerden@indiscale.com>
# Copyright (C) 2021 Alexander Kreft
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""A CFood for hdf5 files
This module allows to parse hdf5 files and reproduce their structure in form
of Records that reference each other.
hdf5 files are composed of groups and datasets. Both of which can have
attributes. Groups and datasets are mapped to Records and attributes to
Properties.
"""
import re
from copy import deepcopy
import caosdb as db
import h5py
import numpy as np
from caosadvancedtools.cfood import fileguide
from caosdb.common.datatype import is_reference
from caosdb.common.utils import uuid
from ..cfood import (AbstractFileCFood, assure_has_description,
assure_has_parent, assure_has_property,
assure_property_is)
from ..structure_mapping import (EntityMapping, collect_existing_structure,
update_structure)
def h5_attr_to_property(val):
    """Return the value and datatype of a CaosDB Property for the given value.

    1d arrays are converted to lists; 2d and higher dimensionality arrays
    are being ignored and signalled by returning ``(None, None)``.

    Parameters
    ----------
    val : str, complex, np.ndarray or numpy scalar
        The attribute value as read from an HDF5 file.

    Returns
    -------
    tuple
        ``(value, datatype)`` suitable for a CaosDB Property, or
        ``(None, None)`` if no suitable Property can be created.

    Raises
    ------
    NotImplementedError
        If the value has no ``dtype`` attribute (and is not str/complex),
        has an unsupported dtype, is an object array with non-string
        elements, or is a 0-dimensional ndarray.
    """
    if isinstance(val, str):
        return val, db.TEXT

    if isinstance(val, complex):
        # complex numbers have no dedicated CaosDB datatype; stored as TEXT
        return val, db.TEXT

    if not hasattr(val, 'dtype'):
        # Fixed message: adjacent literals previously concatenated to
        # "...missing thedtype attribute" (missing space).
        raise NotImplementedError("Code assumes only str are missing the "
                                  "dtype attribute")

    if issubclass(val.dtype.type, np.floating):
        dtype = db.DOUBLE
    elif issubclass(val.dtype.type, np.integer):
        dtype = db.INTEGER
    elif val.dtype.kind in ['S', 'U']:
        # bytes / unicode arrays are normalized to Python str
        dtype = db.TEXT
        val = val.astype(str)
    elif val.dtype.kind == 'O':
        # object arrays are only supported if every element is a str
        if not np.all([isinstance(el, str) for el in val]):
            raise NotImplementedError("Cannot convert arbitrary objects")
        dtype = db.TEXT
        val = val.astype(str)
    else:
        raise NotImplementedError("Unknown dtype used")

    if isinstance(val, np.ndarray):
        if val.ndim > 1:
            # 2d and higher dimensional data cannot be represented
            return None, None

        if val.ndim == 0:
            raise NotImplementedError(
                "Code assumes that scalar values "
                "will not be given as np.ndarray objects")
        val = list(val)
        dtype = db.LIST(dtype)

    # TODO this can eventually be removed
    if hasattr(val, 'ndim'):
        if not isinstance(val, np.ndarray) and val.ndim != 0:
            print(val, val.ndim)
            raise Exception(
                "Implementation assumes that only np.arrays have ndim.")

    return val, dtype
class H5CFood(AbstractFileCFood):
    """ H5CFood which consumes a HDF5 file.

    The structure is mapped onto an equivalent structure of interconnected
    Records.

    Attributes
    ----------
    root_name : str, default "root"
        Type of the root Record (the Record corresponding to the root node in
        the HDF5 file)
    h5file : h5py.File, default None
        Name of the hdf5-file to read
    """

    # to be overwritten by subclasses
    root_name = "root"

    def __init__(self, *args, **kwargs):
        """CFood which consumes HDF5 files."""
        super().__init__(*args, **kwargs)
        # handle of the opened HDF5 file; set in collect_information()
        self.h5file = None
        self.hdf5Container = db.Container()
        # maps the locally built record tree to entities found on the server
        self.em = EntityMapping()

    def collect_information(self):
        # NOTE(review): the file handle is opened read-only and never
        # explicitly closed here — presumably kept for the CFood's lifetime;
        # confirm against the crawler's lifecycle.
        self.h5file = h5py.File(fileguide.access(self.crawled_path), 'r')

    @staticmethod
    def get_re():
        """Return a regular expression string to match *.h5, *.nc, *.hdf, *.hdf5."""
        extensions = [
            "h5",
            "nc",
            "hdf",
            "hdf5",
        ]

        return AbstractFileCFood.re_from_extensions(extensions)

    def create_identifiables(self):
        """Create identifiables out of groups in the HDF5 file.

        This method will call is_identifiable(h5path, h5object) and create_identifiable(h5path,
        h5object) on each HDF5 object to decide and actually create the identifiables.
        """
        # manually create the identifiable root element: self.identifiable_root
        self.structure = self.create_structure(self.h5file,
                                               special_treatment=self.special_treatment)

    def update_identifiables(self):
        """Check if the identifiables need to be updated.

        In that case also add the updated entities to the list of updateables.

        This method will iterate over the groups and datasets governed by this CFood's identifiables
        and call ``update_object(path, h5object)`` on each object.
        """
        # anchor the root of the target tree so it can be matched to the
        # manually created identifiable root
        self.structure._cuid = "root element"
        self.em.add(self.structure, self.identifiable_root)
        collect_existing_structure(self.structure, self.identifiable_root,
                                   self.em)
        self.to_be_inserted = db.Container()
        self.insert_missing_structure(self.structure)

        # TODO this is a workaround due to the fact that the caosdb library
        # changes the objects in the Container if it is inserted. The graph
        # structure is flattened. I.e. references to other entity objects are
        # replaced with their IDs. However this code depends on this graph.
        tmp_copy = deepcopy(self.to_be_inserted)
        tmp_copy.insert()

        # copy the server-assigned ids back onto the original graph objects
        for e1, e2 in zip(tmp_copy, self.to_be_inserted):
            e2.id = e1.id
        # End workaround

        # self.update_structure(self.structure)
        update_structure(self.em, self.to_be_updated, self.structure)

    def special_treatment(self, key, value, dtype):
        """define special treatment of attributes

        to be overwritten by child classes.

        key: attribute name
        value: attribute value
        dtype: CaosDB datatype of the value

        Returning ``None`` as key drops the attribute.
        """

        return key, value, dtype

    @classmethod
    def create_structure(cls, h5obj, create_recordTypes=False, collection=None,
                         special_treatment=None):
        """Create Records and Record types from a given hdf5-object for all
        items in the tree. Attributes are added as properties, the
        values only if the dimension < 2.

        Parameters
        ----------
        h5obj : h5py.File
            a hdf5-file object
        create_recordTypes : bool, optional
            If True, create RecordTypes (plus one Property per attribute)
            instead of Records with values. Default False.
        collection : list or db.Container, optional
            Accumulator every created entity is appended to; a fresh list is
            created when None.
        special_treatment : callable, optional
            Hook ``(key, value, dtype) -> (key, value, dtype)``; a None key
            drops the attribute. Defaults to the identity function.

        Returns
        -------
        rec : db.Container
            Contains the Record Types, Records and Properties for the
            input-tree
        """
        if collection is None:
            collection = []

        if special_treatment is None:
            def special_treatment(x, y, z): return x, y, z

        # the HDF5 root group has the path "/"; map it to cls.root_name
        if h5obj.name == "/":
            name_without_path = cls.root_name
        else:
            name_without_path = h5obj.name.split("/")[-1]

        if create_recordTypes:
            rec = db.RecordType(name=name_without_path)
        else:
            rec = db.Record().add_parent(name=name_without_path)
        collection.append(rec)

        if isinstance(h5obj, h5py.Group):
            # recurse into subgroups/datasets; each becomes a reference
            # property on the current record
            for subgroup in h5obj.keys():
                subgroup_name = h5obj[subgroup].name.split("/")[-1]
                sub = H5CFood.create_structure(h5obj[subgroup],
                                               create_recordTypes=create_recordTypes,
                                               collection=collection,
                                               special_treatment=special_treatment)

                if create_recordTypes:
                    rec.add_property(subgroup_name)
                else:
                    rec.add_property(subgroup_name, value=sub)

        for key, val in h5obj.attrs.items():
            # ignored: HDF5 bookkeeping attributes
            if key in ["REFERENCE_LIST", "DIMENSION_LIST", "NAME", "CLASS"]:
                continue

            val, dtype = h5_attr_to_property(val)

            # (None, None) signals an unrepresentable value (e.g. 2d array)
            if val is None and dtype is None:
                continue

            if create_recordTypes and key.lower() not in ['description']:
                treated_k, _, treated_dtype = special_treatment(
                    key, val, dtype)

                if treated_k is not None:
                    prop = db.Property(name=treated_k, datatype=treated_dtype)
                    collection.append(prop)
                    rec.add_property(name=treated_k)
            else:
                treated_k, treated_v, treated_dtype = special_treatment(
                    key, val, dtype)

                if treated_k is not None:
                    rec.add_property(name=treated_k, value=treated_v,
                                     datatype=treated_dtype)

        return rec

    def insert_missing_structure(self, target_structure: db.Record):
        # records without a matched existing counterpart must be inserted
        if target_structure._cuid not in self.em.to_existing:
            self.to_be_inserted.append(target_structure)

        # recurse into referenced sub-records
        for prop in target_structure.get_properties():
            if prop.is_reference(server_retrieval=True):
                self.insert_missing_structure(prop.value)
......@@ -26,8 +26,8 @@ from .cfood import AbstractFileCFood, assure_has_property
class ExampleCFood(AbstractFileCFood):
@staticmethod
def get_re():
@classmethod
def get_re(cls):
return (r".*/(?P<species>[^/]+)/"
r"(?P<date>\d{4}-\d{2}-\d{2})/README.md")
......
#!/usr/bin/env python3
# This file is a part of the CaosDB Project.
#
# Copyright (C) 2021 IndiScale GmbH <www.indiscale.com>
# Copyright (C) 2021 Henrik tom Wörden <h.tomwoerden@indiscale.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import caosdb as db
from caosdb.apiutils import resolve_reference
from caosdb.common.utils import uuid
from .cfood import (assure_has_description, assure_has_parent,
assure_property_is)
class EntityMapping(object):
    """Bookkeeping between local entities and entities on the server.

    ``to_existing`` maps a target entity's ``_cuid`` (str) to the existing
    server-side entity object; ``to_target`` maps an existing entity's
    ``id`` to the local target entity object.
    """

    def __init__(self):
        # cuid (str) -> existing (server) entity
        self.to_existing = {}
        # server id -> target (local) entity
        self.to_target = {}

    def add(self, target, existing):
        """Register *target* and *existing* as counterparts of each other."""
        if target._cuid is None:
            # give the local entity a cuid so it can serve as a dict key
            target._cuid = str(uuid())
        cuid_key = str(target._cuid)
        self.to_existing[cuid_key] = existing
        self.to_target[existing.id] = target
def collect_existing_structure(target_structure, existing_root, em):
    """ recursively collects existing entities

    The collected entities are those that correspond to the ones in
    target_structure.

    Parameters
    ----------
    target_structure : db.Record
        The locally created record tree (e.g. built from an hdf5 file).
    existing_root : db.Record
        The retrieved server-side counterpart of ``target_structure``.
    em: EntityMapping
        Filled with the (target, existing) pairs that are found.

    Raises
    ------
    ValueError
        If a property name occurs more than once on ``target_structure``,
        or if target and existing structure reference a different number
        of entities under the same property name.
    """

    for prop in target_structure.properties:
        # properties without a value cannot be matched
        if prop.value is None:
            continue

        # only reference properties span the record graph
        if not prop.is_reference(server_retrieval=True):
            continue

        if (len([p for p in target_structure.properties if p.name == prop.name])
                != 1):
            raise ValueError("Current implementation allows only one property "
                             "for each property name")

        if (existing_root.get_property(prop.name) is not None and
                existing_root.get_property(prop.name).value is not None):
            # resolve ids into entity objects on both sides before pairing
            resolve_reference(prop)
            resolve_reference(existing_root.get_property(prop.name))
            referenced = existing_root.get_property(prop.name).value

            # normalize single references to one-element lists
            if not isinstance(referenced, list):
                referenced = [referenced]
            target_value = prop.value

            if not isinstance(target_value, list):
                target_value = [target_value]

            if len(target_value) != len(referenced):
                raise ValueError()

            # pair up corresponding entities and recurse into them
            for tent, eent in zip(target_value, referenced):
                em.add(tent, eent)
                collect_existing_structure(tent, eent, em)
def update_structure(em, updating: db.Container, target_structure: db.Record):
    """Compare the existing records with the target record tree created
    from the h5 object.

    Parameters
    ----------
    em : EntityMapping
        Mapping between the target tree and the retrieved entities.
    updating : db.Container
        Collects the entities that need to be updated.
    target_structure : db.Record
        A record which may have references to other records. Must be a DAG.
    """
    cuid = target_structure._cuid

    # update this node if a server-side counterpart was matched
    if cuid in em.to_existing:
        matched = em.to_existing[cuid]
        update_matched_entity(em, updating, target_structure, matched)

    # walk the reference graph depth-first
    for prop in target_structure.get_properties():
        if prop.is_reference(server_retrieval=True):
            update_structure(em, updating, prop.value)
def update_matched_entity(em, updating, target_record, existing_record):
    """Update the Record existing on the server according to *target_record*.

    Parents, the description and all properties of ``existing_record`` are
    aligned with those of ``target_record``; entities requiring an update
    are collected in ``updating``.

    Raises
    ------
    ValueError
        If a parent of ``target_record`` has an empty name.
    """
    for parent in target_record.get_parents():
        if parent.name == "":
            raise ValueError("Parent name must not be empty.")
        assure_has_parent(existing_record, parent.name, force=True)

    description = target_record.description
    if description is not None:
        # make sure the existing record carries the same description
        assure_has_description(existing_record, description,
                               to_be_updated=updating)

    for prop in target_record.get_properties():
        value = prop.value
        if isinstance(value, db.Entity):
            # referenced entities are compared via their server id; prefer
            # the id of the matched existing entity when one is known
            cuid = value._cuid
            if cuid in em.to_existing:
                value = em.to_existing[cuid].id
            else:
                value = value.id
        assure_property_is(existing_record, prop.name, value,
                           to_be_updated=updating)
......@@ -5,6 +5,8 @@ import os
import sqlite3
from hashlib import sha256
import tempfile
class SuppressKnown(logging.Filter):
"""
......@@ -26,8 +28,9 @@ class SuppressKnown(logging.Filter):
if db_file:
self.db_file = db_file
else:
self.db_file = "/tmp/caosadvanced_suppressed_cache.db"
tmppath = tempfile.gettempdir()
tmpf = os.path.join(tmppath, "caosadvanced_suppressed_cache.db")
self.db_file = tmpf
if not os.path.exists(self.db_file):
self.create_cache()
......
......@@ -4,7 +4,7 @@ skip_missing_interpreters = true
[testenv]
deps=nose
pandas
caosdb
git+https://gitlab.indiscale.com/caosdb/src/caosdb-pylib.git@dev
pytest
pytest-cov
openpyxl
......
import h5py
import numpy as np
def create_hdf5_file(filename="hdf5_dummy_file.hdf5"):
    '''
    Create a dummy hdf5-file for testing.

    Parameters
    ----------
    filename : str, optional
        Path of the file to create. Default "hdf5_dummy_file.hdf5".

    Structure:

    root:-->root
         group_level1_a:-->group
                  group_level2_aa:-->group
                           group_level3_aaa:-->group
                                    level3_aaa_floats_2d = float64(100x100)
                           group_level3_aab:-->group
                  group_level2_ab:-->group
                           group_level3_aba:-->group
                                    level3_aba_floats_2d = float64(100x100)
                  group_level2_ac:-->group
                           level2_ac_integers_2d = int32(100x100)
         group_level1_b:-->group
                  group_level2_ba:-->group
                           level2_ba_integers_2d = int32(100x100)
                  level1_b_floats = float64(10000)
         group_level1_c:-->group
                  level1_c_floats = float64(10000)
         root_integers = int32(10000)
    '''
    with h5py.File(filename, mode="w") as hdf5:
        # Toplevel groups
        group_lvl1_a = hdf5.create_group("group_level1_a")
        group_lvl1_b = hdf5.create_group("group_level1_b")
        group_lvl1_c = hdf5.create_group("group_level1_c")

        # Level 2 groups
        group_lvl2_aa = group_lvl1_a.create_group("group_level2_aa")
        group_lvl2_ab = group_lvl1_a.create_group("group_level2_ab")
        group_lvl2_ac = group_lvl1_a.create_group("group_level2_ac")
        group_lvl2_ba = group_lvl1_b.create_group("group_level2_ba")

        # Level 3 groups (group_level3_aab deliberately stays empty, see the
        # structure diagram above)
        group_lvl3_aaa = group_lvl2_aa.create_group("group_level3_aaa")
        group_lvl2_aa.create_group("group_level3_aab")
        group_lvl3_aba = group_lvl2_ab.create_group("group_level3_aba")

        # Datasets (1d and 2d, int and float)
        integers = np.arange(10000)
        floats = np.arange(0, 1000, 0.1)
        integers_2d = np.diag(np.arange(100))
        floats_2d = np.eye(100)

        # Only datasets that later receive attributes need to keep a handle;
        # the remaining assignments of the original version were unused.
        data_root = hdf5.create_dataset("root_integers", data=integers)
        group_lvl1_b.create_dataset("level1_b_floats", data=floats)
        group_lvl1_c.create_dataset("level1_c_floats", data=floats)
        data_lvl2_ac = group_lvl2_ac.create_dataset("level2_ac_integers_2d", data=integers_2d)
        group_lvl2_ba.create_dataset("level2_ba_integers_2d", data=integers_2d)
        data_lvl3_aaa = group_lvl3_aaa.create_dataset("level3_aaa_floats_2d", data=floats_2d)
        group_lvl3_aba.create_dataset("level3_aba_floats_2d", data=floats_2d)

        # Attributes: scalar int/float on groups and datasets, plus 2d
        # attributes (which the cfood must ignore). attrs.create returns
        # None, so the results are not bound to names.
        group_lvl1_a.attrs.create("attr_group_lvl1_a", 1)
        group_lvl2_aa.attrs.create("attr_group_lvl2_aa", -2)
        group_lvl3_aaa.attrs.create("attr_group_lvl3_aaa", 1.0)
        data_root.attrs.create("attr_data_root", -2.0)
        data_lvl2_ac.attrs.create("attr_data_lvl2_ac", np.diag(np.arange(10)))
        data_lvl3_aaa.attrs.create("attr_data_lvl3_aaa", np.eye(10))


if __name__ == "__main__":
    create_hdf5_file()