#!/usr/bin/python3.10
# -*- coding: utf-8 -*-

"""
test_metrics: Metrics Testing Module
------------------------------------------------------------------------------
Contains tests for verifying the functionality of the Metrics class.
"""

import unittest
import os

# context configures sys.path so that the ontoScorer package below can be
# imported when the tests are run from the source tree.
from context import ontoScorer
from ontoScorer.ontology import Ontology
from ontoScorer.metrics import Metrics

#--------------------------------------------------------------------------
# Metrics Test
#--------------------------------------------------------------------------
class TestMetrics(unittest.TestCase):

    def setUp(self):
        # Resolve the fixture ontologies relative to this file so the suite
        # can be run from any working directory.
        DATA_FOLDER_PATH = f"{os.path.dirname(os.path.abspath(__file__))}/test_data"
        self.ontology1_path = f"{DATA_FOLDER_PATH}/ontology_a.ttl"
        self.ontology2_path = f"{DATA_FOLDER_PATH}/ontology_b.ttl"
        self.onto1 = Ontology(self.ontology1_path)
        self.onto2 = Ontology(self.ontology2_path)
        self.metrics = Metrics()
    
    
    def test_computes_scores(self):
        """Per-element scores are None when no elements are present and
        within [0, 1] otherwise."""
        self.metrics.compute_entity_scores(self.onto1, self.onto2)
        for element, score in self.metrics.scores.items():
            if score.total_elements == 0:
                self.assertIsNone(score.precision, f"Precision for {element} should be None when no elements are present")
                self.assertIsNone(score.recall, f"Recall for {element} should be None when no elements are present")
                self.assertIsNone(score.f1, f"F1 score for {element} should be None when no elements are present")
            else:
                self.assertTrue(0 <= score.precision <= 1, f"Invalid precision for {element}")
                self.assertTrue(0 <= score.recall <= 1, f"Invalid recall for {element}")
                self.assertTrue(0 <= score.f1 <= 1, f"Invalid F1 score for {element}")
    
    
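    # A hedged addition, not in the original suite: it assumes f1 is the
    # harmonic mean of precision and recall (the usual definition), which
    # this file alone does not confirm.
    def test_f1_is_harmonic_mean(self):
        self.metrics.compute_entity_scores(self.onto1, self.onto2)
        for element, score in self.metrics.scores.items():
            if score.total_elements > 0 and (score.precision + score.recall) > 0:
                expected = 2 * score.precision * score.recall / (score.precision + score.recall)
                self.assertAlmostEqual(score.f1, expected,
                                       msg=f"F1 for {element} should be the harmonic mean of precision and recall")

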
    def test_print_scores(self):
        """Smoke test: computing and printing the scores should not raise."""
        self.metrics.compute_entity_scores(self.onto1, self.onto2)
        print()  # blank line before the score printout in the test output
        self.metrics.print_scores()
    
    
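    # Another hedged addition: assuming compute_entity_scores performs an
    # exact comparison, scoring an ontology against itself should yield
    # perfect precision, recall, and F1 for every non-empty element type.
    def test_self_comparison_scores(self):
        self.metrics.compute_entity_scores(self.onto1, self.onto1)
        for element, score in self.metrics.scores.items():
            if score.total_elements > 0:
                self.assertAlmostEqual(score.precision, 1.0,
                                       msg=f"Self-comparison precision for {element} should be 1.0")
                self.assertAlmostEqual(score.recall, 1.0,
                                       msg=f"Self-comparison recall for {element} should be 1.0")
                self.assertAlmostEqual(score.f1, 1.0,
                                       msg=f"Self-comparison F1 for {element} should be 1.0")

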
#--------------------------------------------------------------------------
# Main Unit Test Run
#--------------------------------------------------------------------------
if __name__ == "__main__":
    unittest.main()