Skip to content
Snippets Groups Projects
Select Git revision
  • a7603d54d2e6349f433395d0d7fb3b6637a4a00c
  • demo_ci_gitlab_pages default
  • demo_gitlab_ci
  • 5-images-in-annotations
  • 5-final-images
  • 5-chpk-images-in-annot
  • tetras-main protected
  • 5-rebase-images-in-annot
  • 5-wip-images-in-annot
  • tmp
  • 1-edit-annotations-on-videos
  • 5-old-images-in-annotations
  • old_demo_ci_gitlab_pages
  • images_annotations
  • wip
  • devsetup
  • wip-annot-video-ui
  • wip-annotations-on-videos
  • master
  • v0.4.0_react16
  • wip-debugging-annotations
21 results

TextEditor.js

Blame
  • Forked from IIIF / Mirador / Mirador annotations
    The source project has limited visibility.
    test_metrics.py 1.44 KiB
    #!/usr/bin/python3.10
    # -*- coding: utf-8 -*-
    
    #==============================================================================
    # test_metrics: Metrics Testing Module
    #------------------------------------------------------------------------------
    # Contains tests for verifying functionality of the Metrics class.
    #==============================================================================
    
    import unittest
    import os
    from context import ontoScorer
    from ontoScorer.ontology import Ontology
    from ontoScorer.metrics import Metrics
    
    class TestMetrics(unittest.TestCase):
        """Tests for the Metrics class.

        Verifies that precision/recall/f1 scores computed between two
        fixture ontologies are well-formed, and that score printing runs
        without raising.
        """

        def setUp(self):
            """Load the two fixture ontologies and a fresh Metrics instance."""
            # Fixtures live in test_data/ next to this test module.
            data_dir = os.path.join(
                os.path.dirname(os.path.abspath(__file__)), "test_data"
            )
            self.ontology1_path = os.path.join(data_dir, "ontology_a.ttl")
            self.ontology2_path = os.path.join(data_dir, "ontology_b.ttl")
            self.onto1 = Ontology(self.ontology1_path)
            self.onto2 = Ontology(self.ontology2_path)
            self.metrics = Metrics()

        def test_calculate_scores(self):
            """Every precision/recall/f1 score must lie in [0, 1].

            Uses subTest so a single out-of-range value reports which
            score category and which metric failed, instead of a bare
            'False is not true'.
            """
            self.metrics.calculate(self.onto1, self.onto2)
            for key, score in self.metrics.scores.items():
                for metric in ("precision", "recall", "f1"):
                    with self.subTest(category=key, metric=metric):
                        self.assertGreaterEqual(score[metric], 0)
                        self.assertLessEqual(score[metric], 1)

        def test_print_scores(self):
            """Smoke test: printing the computed scores must not raise."""
            self.metrics.calculate(self.onto1, self.onto2)
            self.metrics.print_scores()
    
    
    # Allow running this test module directly (python test_metrics.py):
    # unittest discovers and runs all TestCase methods defined above.
    if __name__ == "__main__":
        unittest.main()