test_rule_phenomena_classifier.py
    #!/usr/bin/python3.10
    # -*- coding: utf-8 -*-
    
    #==============================================================================
    # TENET: Extraction Rule Test
    #------------------------------------------------------------------------------
    # Script to test rules under development
    #==============================================================================
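    # Usage (assuming the test graphs are present under ./test_data/):
    #     python3 test_rule_phenomena_classifier.py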
    
    import os
    from rdflib import Graph
    
    FILE_PATH = f'{os.path.dirname(os.path.abspath(__file__))}'
    INPUT_DIR_PATH = f'{FILE_PATH}/test_data/'
    OUTPUT_DIR_PATH = f'{FILE_PATH}/test_data/'
    
    TEST_FILE_NAME_1 = 'mod-analyzer-devGraph-1'
    TEST_FILE_NAME_2 = 'mod-analyzer-devGraph-2'
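    # Each test file name corresponds to a Turtle graph '<name>.ttl' under test_data/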
    
    from context import tenet
    from tenet.transduction.rdfterm_computer import __update_uri_with_prefix
    from tenet.transduction import rdfterm_computer, prefix_handle
    from tenet.transduction import net
    from tenet.scheme.amr_rule.transduction import phenomena_mod_analyzer_1 as rule_1
    from tenet.scheme import amr_rule
    
    
    
    #==============================================================================
    # Useful Methods
    #==============================================================================
    
    def load_test_graph(test_file_name):
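        """Load a Turtle test graph from INPUT_DIR_PATH and register the project prefixes on it."""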
        print(f'\n -- Test Graph Loading')
        graph = Graph()
        prefix_handle.update_graph_namespacemanager(graph)
        graph_path = f'{INPUT_DIR_PATH}{test_file_name}.ttl'
        graph.parse(graph_path)
        print(f" ----- Graph Loaded ({len(graph)})")
        return graph
    
    
    def define_clause_list(composition_pattern_list):
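        """Build clause strings from (net_1, relation, net_2) tuples, e.g. ('?n1', 'a', '?n2') gives '?n1 a ?n2.'"""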
        clause_list = []
        for (net_1, relation, net_2) in composition_pattern_list:
            clause_list.append(f'{net_1} {relation} {net_2}.')
        return clause_list
    
    
    def print_triple(graph, triple, num=-1):
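        """Pretty-print a triple, shortening each term to its prefixed form."""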
        num_str = f'[{num}]' if num > -1 else '[-]'
        (s, p, o) = triple
        s = __update_uri_with_prefix(graph, s)
        p = __update_uri_with_prefix(graph, p)
        o = __update_uri_with_prefix(graph, o)
        print(f' {num_str} {s} {p} {o}')
        
    
    def add_triples_in_graph(test_file_name, graph, triple_list):
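        """Add the new triples to the graph and serialize the updated graph as Turtle in OUTPUT_DIR_PATH."""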
        print(f'\n -- Adding triple(s) to the graph')
        print(f" ----- Graph length before update: {len(graph)}")
        print(f" ----- Number of triples to add: {len(triple_list)}")
        
        print(f" ----- Added triples:")
        n = 0
        for triple in triple_list:
            n += 1
            print_triple(graph, triple, num=n)
            graph.add(triple)      
            
        print(f" ----- Graph length after update: {len(graph)}")
        
        output_graph_path = f'{OUTPUT_DIR_PATH}{test_file_name}.result.ttl'
        output_graph_uri = f'https://amr.tetras-libre.fr/rdf/{test_file_name}/result'
        print(f'\n -- Serialize test graph to {output_graph_path}')
        graph.serialize(destination=output_graph_path, 
                        format='turtle',
                        base=output_graph_uri)
    
    
    
    #==============================================================================
    # Development / Step Test
    #==============================================================================
            
    def test_search_pattern_1(graph):
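        """Step test: run the pattern search of rule_1 and print the matched class nets."""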
        
        print('\n -- Step 1: Search Pattern')
        _, pattern_set = rule_1.__search_pattern(graph)
        print(f'\n ----- number of selections found: {len(pattern_set)}')
        for row in pattern_set: 
            result_str = f'>>> '
            result_str += f' {row.class_net_1.n3(graph.namespace_manager)}'
            result_str += f' {row.class_net_2.n3(graph.namespace_manager)}'
            print(result_str) 
            
        print('\n -- Step 2: Disjoint Cover Verification')
        # TODO: the disjoint cover verification is not implemented yet (rule under development)

        return pattern_set
            
        
    
    #==============================================================================
    # Unit Test
    #==============================================================================
    
    def test_rule_application(test_file_name, graph, rule):    
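        """Unit test: apply a transduction rule to the graph and store the resulting triples."""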
        print('\n -- Rule Test')
        
        rule_label, new_triple_list = rule(graph)
        print(f' ----- label: {rule_label}')
        print(f' ----- new_triple_list ({len(new_triple_list)}):')
        
        add_triples_in_graph(test_file_name, graph, new_triple_list)
        
    
    
    #==============================================================================
    # Test Script
    #==============================================================================
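    # Main sequence: load the first test graph, run the step test, then the unit test.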
    
    if __name__ == '__main__':
          
        print('\n *** Test Preparation ***')
        graph_1 = load_test_graph(TEST_FILE_NAME_1)
        print('\n \n')
        
        print('\n ///////////////////// Extraction Rule 1')
        
        print('\n *** Step Test ***')
        pattern_set = test_search_pattern_1(graph_1)
        print('\n \n')
        
        print('\n *** Unit Test ***')
        test_rule_application(TEST_FILE_NAME_1, graph_1, rule_1.analyze_phenomena_mod_1)
        print('\n \n')
    
        print('\n *** - ***')