test_tool_extended.py

    #!/usr/bin/env python3
    # Tests for the tool using pytest
    # Adapted from check-sfs
    # A. Schlemmer, 06/2021
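    #
    # The test suite can be run with pytest, for example:
    #   pytest -v test_tool_extended.py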
    
    from newcrawler import Crawler
    from newcrawler.structure_elements import File, DictTextElement, DictListElement
    from newcrawler.identifiable_adapters import IdentifiableAdapter, LocalStorageIdentifiableAdapter
    from functools import partial
    from copy import deepcopy
    from unittest.mock import MagicMock, Mock
    from os.path import join, dirname, basename
    import yaml
    import caosdb as db
    from caosdb.apiutils import compare_entities
    
    import pytest
    from pytest import raises
    
    
    def rfp(*pathcomponents):
        """
        Return the full path of a test resource below this file's directory.
        Shorthand convenience function.
        """
        return join(dirname(__file__), *pathcomponents)
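    # Illustrative example (the absolute prefix depends on where this file lives):
    #   rfp("test_directories", "examples_article")
    #   -> "<directory of this file>/test_directories/examples_article"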
    
    
    def dircheckstr(*pathcomponents, structure_element_type="Directory"):
        """
        Return the debug tree identifier for a given path.
        """
        return ("newcrawler.structure_elements." + structure_element_type + ": " +
                basename(join(*pathcomponents)) + ", " +
                rfp("test_directories", "examples_article", *pathcomponents))
    
    
    @pytest.fixture
    def crawler():
        """Crawl the example directory tree using the scifolder_extended.yml definition."""
        crawler = Crawler(debug=True)
        crawler.crawl_directory(rfp("test_directories", "examples_article"),
                                rfp("scifolder_extended.yml"))
        return crawler
    
    
    # @pytest.fixture
    # def ident(crawler):
    #     ident = LocalStorageIdentifiableAdapter()
    #     crawler.identifiableAdapter = ident
        
    #     ident.restore_state(rfp("records.xml"))
    
    #     ident.register_identifiable(
    #         "Person", db.RecordType()
    #         .add_parent(name="Person")
    #         .add_property(name="first_name")
    #         .add_property(name="last_name"))
    #     ident.register_identifiable(
    #         "Measurement", db.RecordType()
    #         .add_parent(name="Measurement")
    #         .add_property(name="identifier")
    #         .add_property(name="date")
    #         .add_property(name="project"))
    #     ident.register_identifiable(
    #         "Project", db.RecordType()
    #         .add_parent(name="Project")
    #         .add_property(name="date")
    #         .add_property(name="identifier"))
    #     return ident
    
    
    def test_file_structure_generation(crawler):
        """Check that the debug tree entry for README.md contains a "ReadmeFile"
        record with role File and non-empty path and file attributes."""
        sd = crawler.debug_tree[dircheckstr("SimulationData",
                                            "2020_climate-model-predict", "2020-02-01",
                                            "README.md", structure_element_type="File")]
        assert sd[1]["ReadmeFile"].role == "File"
        assert len(sd[1]["ReadmeFile"].path) > 0
        assert len(sd[1]["ReadmeFile"].file) > 0