#!/usr/bin/env python3
# encoding: utf-8
#
# ** header v3.0
# This file is a part of the CaosDB Project.
#
# Copyright (C) 2022 Alexander Schlemmer
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ** end header
#

"""
module description
"""
import os
import pytest
from subprocess import run

import caosdb as db
from caosadvancedtools.loadFiles import loadpath
from caosadvancedtools.models import parser as parser
from caoscrawler.crawl import crawler_main

# TODO: wait for release of this feature in pylib
# from caosdb.utils.register_tests import clear_database, set_test_key
# set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")

DATADIR = os.path.join(os.path.dirname(__file__), "test_data",
                       "extroot", "use_case_simple_presentation")

# TODO: remove the local clear_database fixture below once the pylib
# register_tests utilities (see commented import above) are released.


@pytest.fixture
def clear_database():
    """Delete all non-builtin entities (ID > 99) before the test runs.

    TODO(fspreck): Remove once the corresponding advancedtools function can
    be used.
    """
    if leftovers := db.execute_query("FIND ENTITY WITH ID>99"):
        leftovers.delete()


def test_complete_crawler(
        clear_database
):
    """End-to-end test of the simple-presentation use case.

    Synchronizes the data model, loads the example data files, runs the
    crawler, and verifies the Experiment and DataAnalysis records that the
    crawler is expected to create.
    """
    # Set up the data model on the server:
    data_model = parser.parse_model_from_yaml(os.path.join(DATADIR, "model.yml"))
    data_model.sync_data_model(noquestion=True, verbose=False)

    # Register the example files with the server:
    for data_path in (
            "/opt/caosdb/mnt/extroot/use_case_simple_presentation/ExperimentalData",
            "/opt/caosdb/mnt/extroot/use_case_simple_presentation/DataAnalysis"):
        loadpath(
            path=data_path,
            include=None,
            exclude=None,
            prefix="/",
            dryrun=False,
            forceAllowSymlinks=False)

    # Run the crawler over the registered files:
    crawler_main(DATADIR,
                 os.path.join(DATADIR, "cfood.yml"),
                 os.path.join(DATADIR, "identifiables.yml"),
                 True,
                 os.path.join(DATADIR, "provenance.yml"),
                 False,
                 "/use_case_simple_presentation")

    # Exactly one Experiment record with the expected properties:
    experiments = db.execute_query("FIND Record Experiment")
    assert len(experiments) == 1
    experiment = experiments[0]
    assert experiment.get_property("identifier").value == "crawlertest"
    assert experiment.get_property("date").value == "2022-03-16"

    # The Experiment references the markdown file it was crawled from:
    md_file = db.File(id=experiment.get_property("mdfile").value).retrieve()
    assert md_file.path == "/ExperimentalData/data.md"

    assert experiment.get_property("alpha").value == 16.0
    assert experiment.get_property("alpha").unit == "km"

    # Exactly one DataAnalysis record, linked back to the Experiment:
    analyses = db.execute_query("FIND Record DataAnalysis")
    assert len(analyses) == 1
    analysis = analyses[0]
    assert analysis.get_property("sources").value[0] == experiment.id

    md_file = db.File(id=analysis.get_property("mdfile").value).retrieve()
    assert md_file.path == "/DataAnalysis/results.md"