# -*- coding: utf-8 -*-
#
# ** header v3.0
# This file is a part of the CaosDB Project.
#
# Copyright (c) 2020 IndiScale GmbH <info@indiscale.com>
# Copyright (c) 2020 Florian Spreckelsen <f.spreckelsen@indiscale.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ** end header
"""Test whether the crawler correctly identifies the data model problems caused by a faulty model.
import caosdb as db
from caosadvancedtools import loadFiles
from caosadvancedtools.cfood import fileguide
from caosadvancedtools.crawler import FileCrawler
from caosadvancedtools.datamodel_problems import DataModelProblems
from caosadvancedtools.guard import INSERT
from caosadvancedtools.models.parser import parse_model_from_yaml
from caosadvancedtools.scifolder import (AnalysisCFood, ExperimentCFood,
PublicationCFood, SimulationCFood)
from insert_model import main as insert_model


def setup_module():
"""Clear problems and remove all entities except for built-in ones."""
DataModelProblems.missing.clear()
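    # Entities with IDs up to 100 are assumed to be server-internal
    # (built-in) entities; everything above that threshold is test data.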
try:
db.execute_query("FIND entity WITH ID > 100").delete()
except Exception as delete_exc:
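        # Deletion may fail, e.g. when there is nothing to delete;
        # log the exception and continue.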
        print(delete_exc)


def setup():
    """No further setup required."""
    setup_module()


def teardown():
    """Delete and clear again."""
    setup_module()


def test_crawler_with_data_model_problems():
"""Test whether data model problems are found correctly."""
    # Load the files from the extroot mount into CaosDB
basepath = "/opt/caosdb/mnt/extroot/"
    pathlist = [basepath + dirname for dirname in
                ["ExperimentalData", "DataAnalysis",
                 "SimulationData", "Publications"]]
for path in pathlist:
loadFiles.loadpath(path=path, include="", exclude="",
prefix="", dryrun=False, forceAllowSymlinks=False)
    # Load the data model, then damage it by deleting some entities
insert_model()
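    # Deleting these entities leaves the CFoods referring to RecordTypes
    # and Properties that no longer exist; this should surface as data
    # model problems during the crawl.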
deleted_entities = {"Experiment", "Poster", "results"}
for ent in deleted_entities:
db.execute_query("FIND "+ent).delete()
# Do the crawling
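    # fileguide.access maps a file's path on the server to the location
    # where the crawler can read it locally (under the extroot mount).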
    def access(x):
        return "extroot" + x
    fileguide.access = access

    crawl_path = "/"
files = FileCrawler.query_files(crawl_path)
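    # With abort_on_exception=False the crawler should log errors and
    # keep going, collecting the names of missing entities in
    # DataModelProblems instead of failing on the first one.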
    c = FileCrawler(files=files, use_cache=False,
                    abort_on_exception=False, interactive=False,
                    hideKnown=False,
                    cfood_types=[ExperimentCFood, AnalysisCFood,
                                 PublicationCFood, SimulationCFood])
c.crawl(security_level=INSERT, path=crawl_path)
# There should be datamodel problems
assert len(DataModelProblems.missing) > 0
# Deleted entities should have been identified:
assert DataModelProblems.missing.issubset(deleted_entities)