#!/usr/bin/env python3
# encoding: utf-8
#
# ** header v3.0
# This file is a part of the CaosDB Project.
#
# Copyright (C) 2022 Alexander Schlemmer
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ** end header
#

import logging
import os

import pytest

from caosadvancedtools.loadFiles import loadpath
from caosadvancedtools.models import parser
from caoscrawler.crawl import crawler_main

import linkahead as db
from linkahead.cached import cache_clear
from linkahead.utils.register_tests import clear_database, set_test_key


set_test_key("10b128cf8a1372f30aa3697466bb55e76974e0c16a599bb44ace88f19c8f61e2")
DATADIR = os.path.join(os.path.dirname(__file__), "test_data",
                       "extroot", "use_case_simple_presentation")


@pytest.fixture(autouse=True)
def clear_cache():
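    """Clear the caches of the ``linkahead.cached`` module before every test
    (autouse) so that results cached by earlier tests cannot leak into this
    one."""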
    cache_clear()


def test_complete_crawler(clear_database, caplog):
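    """End-to-end test of the simple-presentation use case: synchronize the
    data model, upload the test files, check that a bad ``remove_prefix``
    makes the crawler fail, then crawl for real and verify the resulting
    Experiment and DataAnalysis records."""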
    # Set up the data model: create the RecordTypes and Properties defined in
    # model.yml on the server.
    model = parser.parse_model_from_yaml(os.path.join(DATADIR, "model.yml"))
    model.sync_data_model(noquestion=True, verbose=False)

    # Insert the test files from the extroot mount as File entities:
    for path in [
            "/opt/caosdb/mnt/extroot/use_case_simple_presentation/ExperimentalData",
            "/opt/caosdb/mnt/extroot/use_case_simple_presentation/DataAnalysis"]:
        loadpath(
            path=path,
            include=None,
            exclude=None,
            prefix="/",
            dryrun=False,
            forceAllowSymlinks=False)

    # A bad value for "remove_prefix" must make crawler_main fail: it returns
    # a non-zero exit code and logs an error message.
    caplog.set_level(logging.DEBUG, logger="caoscrawler.crawl")
    assert 1 == crawler_main(
        crawled_directory_path=DATADIR,
        cfood_file_name=os.path.join(DATADIR, "cfood.yml"),
        identifiables_definition_file=os.path.join(DATADIR, "identifiables.yml"),
        provenance_file=os.path.join(DATADIR, "provenance.yml"),
        dry_run=False,
        remove_prefix="sldkfjsldf",
    )
    assert "path does not start with the prefix" in caplog.text
    caplog.clear()

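    # With the correct prefix the same crawler run should succeed and insert
    # the records described by cfood.yml.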
    crawler_main(
        crawled_directory_path=DATADIR,
        cfood_file_name=os.path.join(DATADIR, "cfood.yml"),
        identifiables_definition_file=os.path.join(DATADIR, "identifiables.yml"),
        provenance_file=os.path.join(DATADIR, "provenance.yml"),
        dry_run=False,
        remove_prefix=os.path.abspath(DATADIR),
    )

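    # The crawl should have produced exactly one Experiment record with the
    # expected identifier and date.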
    res = db.execute_query("FIND Record Experiment")
    assert len(res) == 1
    assert res[0].get_property("identifier").value == "crawlertest"
    assert res[0].get_property("date").value == "2022-03-16"

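    # Its "mdfile" property references the crawled markdown file as a File
    # entity; the numeric property "alpha" keeps its value and unit.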
    lf = db.File(id=res[0].get_property("mdfile").value).retrieve()
    assert lf.path == "/ExperimentalData/data.md"

    assert res[0].get_property("alpha").value == 16.0
    assert res[0].get_property("alpha").unit == "km"

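    # The single DataAnalysis record must reference the Experiment record via
    # its "sources" property and point to its own markdown file.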
    res_da = db.execute_query("FIND Record DataAnalysis")
    assert len(res_da) == 1
    assert res_da[0].get_property("sources").value[0] == res[0].id

    lf = db.File(id=res_da[0].get_property("mdfile").value).retrieve()
    assert lf.path == "/DataAnalysis/results.md"