#!/usr/bin/python3.10
# -*- coding: utf-8 -*-

"""
test_metrics: Metrics Testing Module
------------------------------------------------------------------------------
Contains tests for verifying functionality of the Metrics class.
"""

import unittest
import os

from context import ontoScorer  # importing context is expected to set up the path to the ontoScorer package
from ontoScorer.ontology import Ontology
from ontoScorer.metrics import Metrics


#--------------------------------------------------------------------------
# Metrics Test
#--------------------------------------------------------------------------
class TestMetrics(unittest.TestCase):

    def setUp(self):
        DATA_FOLDER_PATH = f'{os.path.dirname(os.path.abspath(__file__))}/test_data'
        self.ontology1_path = f"{DATA_FOLDER_PATH}/ontology_a.ttl"
        self.ontology2_path = f"{DATA_FOLDER_PATH}/ontology_b.ttl"
        self.onto1 = Ontology(self.ontology1_path)
        self.onto2 = Ontology(self.ontology2_path)
        self.metrics = Metrics()

    def test_computes_scores(self):
        """Scores must be None for empty element categories and within [0, 1] otherwise."""
        self.metrics.compute_entity_scores(self.onto1, self.onto2)
        for element, score in self.metrics.scores.items():
            if score.total_elements == 0:
                self.assertIsNone(score.precision, f"Precision for {element} should be None when no elements are present")
                self.assertIsNone(score.recall, f"Recall for {element} should be None when no elements are present")
                self.assertIsNone(score.f1, f"F1 score for {element} should be None when no elements are present")
            else:
                self.assertTrue(0 <= score.precision <= 1, f"Invalid precision for {element}")
                self.assertTrue(0 <= score.recall <= 1, f"Invalid recall for {element}")
                self.assertTrue(0 <= score.f1 <= 1, f"Invalid F1 score for {element}")

    def test_print_scores(self):
        """Smoke test: printing the computed scores should not raise."""
        self.metrics.compute_entity_scores(self.onto1, self.onto2)
        print()
        self.metrics.print_scores()


#--------------------------------------------------------------------------
# Main Unit Test Run
#--------------------------------------------------------------------------
if __name__ == "__main__":
    unittest.main()
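

#--------------------------------------------------------------------------
# Illustrative reference (added sketch, not part of the original suite)
#--------------------------------------------------------------------------
# Why the [0, 1] bound checks in test_computes_scores hold: under the
# standard harmonic-mean definition F1 = 2*P*R / (P + R), F1 lies in [0, 1]
# whenever precision P and recall R do. Assuming Metrics follows this
# standard definition (an assumption; its implementation may differ),
# reference_f1 below is a hypothetical helper that spells out the formula.
def reference_f1(precision, recall):
    """Harmonic-mean F1; conventionally 0.0 when precision + recall == 0."""
    if precision + recall == 0:
        return 0.0
    return 2 * precision * recall / (precision + recall)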