test_metrics.py
#!/usr/bin/python3.10
# -*- coding: utf-8 -*-

#==============================================================================
# test_metrics: Metrics Testing Module
#------------------------------------------------------------------------------
# Contains tests verifying the functionality of the Metrics class.
#==============================================================================

import unittest
import os

from context import ontoScorer
from ontoScorer.ontology import Ontology
from ontoScorer.metrics import Metrics


class TestMetrics(unittest.TestCase):

    def setUp(self):
        # Load the two test ontologies shipped in the test_data folder.
        DATA_FOLDER_PATH = f'{os.path.dirname(os.path.abspath(__file__))}/test_data'
        self.ontology1_path = f"{DATA_FOLDER_PATH}/ontology_a.ttl"
        self.ontology2_path = f"{DATA_FOLDER_PATH}/ontology_b.ttl"
        self.onto1 = Ontology(self.ontology1_path)
        self.onto2 = Ontology(self.ontology2_path)
        self.metrics = Metrics()

    def test_calculate_scores(self):
        # Each score entry should hold precision, recall and f1 values in [0, 1].
        self.metrics.calculate(self.onto1, self.onto2)
        for key in self.metrics.scores:
            self.assertTrue(0 <= self.metrics.scores[key]["precision"] <= 1)
            self.assertTrue(0 <= self.metrics.scores[key]["recall"] <= 1)
            self.assertTrue(0 <= self.metrics.scores[key]["f1"] <= 1)

    def test_print_scores(self):
        # Printing should not raise once the scores have been calculated.
        self.metrics.calculate(self.onto1, self.onto2)
        self.metrics.print_scores()


if __name__ == "__main__":
    unittest.main()
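
For reference, the same API these tests exercise can be used standalone. The sketch below assumes only what the tests show: Ontology(path), Metrics.calculate(onto1, onto2), a scores dict keyed by element category with "precision", "recall" and "f1" entries, and print_scores(). The variable names and the script layout are illustrative, not part of ontoScorer.

    # Minimal usage sketch, based solely on the API exercised in test_metrics.py.
    from ontoScorer.ontology import Ontology
    from ontoScorer.metrics import Metrics

    # Paths reuse the test fixtures; substitute your own .ttl ontologies.
    reference = Ontology("test_data/ontology_a.ttl")
    candidate = Ontology("test_data/ontology_b.ttl")

    metrics = Metrics()
    metrics.calculate(reference, candidate)

    # scores is assumed to map each category to a dict with "precision",
    # "recall" and "f1" values, as checked in test_calculate_scores.
    for category, values in metrics.scores.items():
        print(category, values["precision"], values["recall"], values["f1"])

    metrics.print_scores()

Running the test file directly (python3 test_metrics.py) works through the unittest.main() guard; running python -m unittest from the tests folder should also discover it, provided the context module sets up the import path as usual.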