test_metrics.py
    #!/usr/bin/python3.10
    # -*- coding: utf-8 -*-
    
    """
    test_metrics: Metrics Testing Module
    ------------------------------------------------------------------------------
    Contains tests verifying the functionality of the Metrics class, which compares
    two ontologies and reports per-element precision, recall, and F1 scores.
    """
    
    import unittest
    import os
    from context import ontoScorer
    from ontoScorer.ontology import Ontology
    from ontoScorer.metrics import Metrics
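
    # Note: `from context import ontoScorer` above follows the common tests/context.py
    # pattern, in which a small shim module puts the package root on sys.path so the
    # tests can run from a source checkout without installing the package. A minimal
    # sketch of such a shim (an assumption about, not a copy of, this repository's
    # actual context.py) would be:
    #
    #     # tests/context.py
    #     import os
    #     import sys
    #     sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
    #     import ontoScorer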
    
    #--------------------------------------------------------------------------
    # Metrics Test
    #--------------------------------------------------------------------------
    class TestMetrics(unittest.TestCase):
    
        def setUp(self):
            """Load the two sample ontologies from test_data and create a fresh Metrics instance."""
            DATA_FOLDER_PATH = f'{os.path.dirname(os.path.abspath(__file__))}/test_data'
            self.ontology1_path = f"{DATA_FOLDER_PATH}/ontology_a.ttl"
            self.ontology2_path = f"{DATA_FOLDER_PATH}/ontology_b.ttl"
            self.onto1 = Ontology(self.ontology1_path)
            self.onto2 = Ontology(self.ontology2_path)
            self.metrics = Metrics()
    
    
        def test_computes_scores(self):
            """Computed scores are None when an element type is absent, otherwise within [0, 1]."""
            self.metrics.compute_entity_scores(self.onto1, self.onto2)
            for element, score in self.metrics.scores.items():
                if score.total_elements == 0:
                    self.assertIsNone(score.precision, f"Precision for {element} should be None when no elements are present")
                    self.assertIsNone(score.recall, f"Recall for {element} should be None when no elements are present")
                    self.assertIsNone(score.f1, f"F1 score for {element} should be None when no elements are present")
                else:
                    self.assertTrue(0 <= score.precision <= 1, f"Invalid precision for {element}")
                    self.assertTrue(0 <= score.recall <= 1, f"Invalid recall for {element}")
                    self.assertTrue(0 <= score.f1 <= 1, f"Invalid F1 score for {element}")
    
    
        def test_print_scores(self):
            """print_scores() runs without raising once scores have been computed."""
            self.metrics.compute_entity_scores(self.onto1, self.onto2)
            print()  # blank line so the printed scores are not glued to unittest's progress output
            self.metrics.print_scores()
    
    
    #--------------------------------------------------------------------------
    # Main Unit Test Run
    #--------------------------------------------------------------------------
    if __name__ == "__main__":
        unittest.main()
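
For context, here is a minimal standalone sketch of the workflow these tests exercise.
It assumes only the names used above (Ontology, Metrics, compute_entity_scores,
print_scores); the file paths and the reference/candidate roles of the two arguments
are illustrative assumptions, not something the test file specifies.

    from ontoScorer.ontology import Ontology
    from ontoScorer.metrics import Metrics

    # Load the two ontologies to compare from Turtle files (illustrative paths).
    reference = Ontology("test_data/ontology_a.ttl")
    candidate = Ontology("test_data/ontology_b.ttl")

    # Score the candidate against the reference, element type by element type,
    # then print the resulting scores.
    metrics = Metrics()
    metrics.compute_entity_scores(reference, candidate)
    metrics.print_scores()

The tests themselves can be run directly (python3 test_metrics.py) or via unittest
discovery (python3 -m unittest) from the directory containing this file.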