From 4bc1bcae40788d585b99505ecd557f975e986084 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Aur=C3=A9lien=20Lamercerie?= <aurelien.lamercerie@tetras-libre.fr> Date: Sat, 18 Feb 2023 13:24:26 +0100 Subject: [PATCH] New transduction package: add rdfterm_computer module --- tenet/febTransduction/net/net.py | 2 +- .../phenomena_application_or.py | 204 --------------- ...uriref_computer.py => rdfterm_computer.py} | 20 +- tenet/tenet.log | 237 ------------------ tests/test_rule.py | 35 ++- ...y => test_transduction_naming_computer.py} | 0 ... => test_transduction_rdfterm_computer.py} | 6 +- ...py => test_transduction_semantic_net_1.py} | 0 ...py => test_transduction_semantic_net_2.py} | 0 ...py => test_transduction_semantic_net_3.py} | 0 ...py => test_transduction_semantic_net_4.py} | 0 11 files changed, 42 insertions(+), 462 deletions(-) delete mode 100644 tenet/febTransduction/phenomena_application_or.py rename tenet/febTransduction/{uriref_computer.py => rdfterm_computer.py} (77%) rename tests/{test_naming_computer.py => test_transduction_naming_computer.py} (100%) rename tests/{test_transduction_uriref_computer.py => test_transduction_rdfterm_computer.py} (97%) rename tests/{test_semantic_net_1.py => test_transduction_semantic_net_1.py} (100%) rename tests/{test_semantic_net_2.py => test_transduction_semantic_net_2.py} (100%) rename tests/{test_semantic_net_3.py => test_transduction_semantic_net_3.py} (100%) rename tests/{test_semantic_net_4.py => test_transduction_semantic_net_4.py} (100%) diff --git a/tenet/febTransduction/net/net.py b/tenet/febTransduction/net/net.py index d2c0cf10..61404c55 100644 --- a/tenet/febTransduction/net/net.py +++ b/tenet/febTransduction/net/net.py @@ -14,7 +14,7 @@ from rdflib.namespace import NamespaceManager from rdflib.term import _is_valid_uri from febTransduction.net import SemanticNetReferenceHandle -from febTransduction.uriref_computer import produce_uriref +from febTransduction.rdfterm_computer import produce_uriref from febTransduction.query_builder import generate_select_query diff --git a/tenet/febTransduction/phenomena_application_or.py b/tenet/febTransduction/phenomena_application_or.py deleted file mode 100644 index 4cb1586e..00000000 --- a/tenet/febTransduction/phenomena_application_or.py +++ /dev/null @@ -1,204 +0,0 @@ -#!/usr/bin/python3.10 -# -*-coding:Utf-8 -* - -#============================================================================== -# TENET: AMR CTR at 'Net Expansion' level for phenomena application (or) -#------------------------------------------------------------------------------ -# Module grouping compositional transduction rule_sets (CTR) for the analysis -# of AMR structures, at 'Net Expansion' level -#============================================================================== - -import subprocess, os -from rdflib import Graph -from rdflib import Namespace -from rdflib.namespace import NamespaceManager - -if __name__ == '__main__': - import os, sys - LIB_PATH = f'{os.path.dirname(os.path.abspath(__file__))}/..' 
- sys.path.insert(0, os.path.abspath(LIB_PATH)) - print(sys.path[0]) - -import febTransduction as transduction -from febTransduction import net -from febTransduction.query_builder import generate_select_query -from febTransduction.naming_computer import define_composite_naming_1 - - -#============================================================================== -# Rule with pattern property(class, or_phenomena) -#============================================================================== - -def __select_pattern_1(): - - # -- Select Data List - select_data_list = ['?property_net', '?class_net', '?phenomena_net'] - - # -- Clause List - clause_list = [] - clause_list.append(f'?property_net a [rdfs:subClassOf* net:Property_Net].') - clause_list.append(f'?class_net a [rdfs:subClassOf* net:Class_Net].') - clause_list.append(f'?phenomena_net a [rdfs:subClassOf* net:Phenomena_Net].') - clause_list.append(f'?phenomena_net net:hasPhenomenaType amr:phenomena_conjunction_or.') - clause_list.append(f'?property_net amr:role_ARG0 ?class_net.') - clause_list.append(f'?property_net amr:role_ARG1 ?phenomena_net.') - - # -- Query Generation - query_code = transduction.query_builder.generate_select_query(select_data_list, clause_list) - - return query_code - - # pattern = transduction.Pattern('class_net', 'property_net', 'phenomena_net') - # pattern.add_identification_pattern(phenomena_net, phenomena_type='amr:phenomena_conjunction_or') - # pattern.add_composition_pattern(property_net, 'amr:role_ARG0', class_net_0) - # pattern.add_composition_pattern(property_net, 'amr:role_ARG1', phenomena_net) - # pattern_query = pattern.get_select_query() - # return pattern_query - - -def __op_pattern_1(phenomena_net_uri, num): - - assert 1 <= num <= 9 - - # -- Select Data List - select_data_list = ['?class_net'] - - # -- Clause List - clause_list = [] - clause_list.append(f'?class_net a [rdfs:subClassOf* net:Class_Net].') - clause_list.append(f'{phenomena_net_uri} amr:role_op{num} ?class_net.') - - # -- Query Generation - query_code = transduction.query_builder.generate_select_query(select_data_list, clause_list) - - return query_code - - -def __define_restriction(net, op_set): # TODO - pass - return net - - -def analyze_phenomena_or_1(graph): - - # -- Rule Initialization - rule_label = '"or" phenomena analysis 1 [ property(class, or_phenomena) ]' - print(f"--- *** February Transduction *** Sequence: {rule_label}") - #logger.info(f"--- *** February Transduction *** Sequence: {rule_label}") - - # -- Selection Pattern Application - query_code = __select_pattern_1() - pattern_set = graph.query(query_code) - - # -- New Net Computing - new_triple_list = [] - for selection in pattern_set: - - # -- Net Composition - class_net = net.ClassNet(graph, selection.class_net) - property_net = net.PropertyNet(graph, selection.property_net) - phenomena_net = net.PhenomenaNet(graph, selection.phenomena_net) - composite_class_net = net.CompositeClassNet(graph) - composite_class_net.compose(class_net, property_net, phenomena_net) - - # -- Data Computation - composite_class_net.mother_class_net = class_net.uri - # etc - - # -- Restriction Computation - pass # TODO - # for num in range(1, 9+1): - # query_code = __op_pattern_1(selection.phenomena_net, num) - # op_set = graph.query(query_code) - # composite_class_net = __define_restriction(composite_class_net, op_set) - - # -- Relation Propagation - pass # TODO - # for (n1, rel, _) in class_net.input_relation_list: - # composite_class_net.add_input_relation(n1, rel) - # TODO: à voir si on 
veut d'autres relations - - # -- Net Naming - composite_class_net.naming = define_composite_naming_1(class_net, property_net, phenomena_net) - - # -- Finalization - composite_class_net.finalize() - new_triples = composite_class_net.generate_triple_definition() - new_triple_list.append(new_triples) - - return rule_label, new_triple_list - - -#============================================================================== -# Rule with pattern property(property, or_phenomena) -#============================================================================== - -def __select_pattern_2(): - - # -- Select Data List - select_data_list = ['?property_net', '?property_net_0', '?phenomena_net'] - - # -- Clause List - clause_list = [] - clause_list.append(f'?property_net a [rdfs:subClassOf* net:Property_Net].') - clause_list.append(f'?property_net_0 a [rdfs:subClassOf* net:Property_Net].') - clause_list.append(f'?phenomena_net a [rdfs:subClassOf* net:Phenomena_Net].') - clause_list.append(f'?phenomena_net net:hasPhenomenaType amr:phenomena_conjunction_or.') - clause_list.append(f'?property_net amr:role_ARG0 ?property_net_0.') - clause_list.append(f'?property_net amr:role_ARG1 ?phenomena_net.') - - # -- Query Generation - query_code = transduction.query_builder.generate_select_query(select_data_list, clause_list) - - return query_code - - -def analyze_phenomena_or_2(graph): - - # -- Rule Initialization - rule_label = '"or" phenomena analysis 2 [ property(property, or_phenomena) ]' - print(f"--- *** February Transduction *** Sequence: {rule_label}") - - # -- Selection Pattern Application - query_code = __select_pattern_2() - pattern_set = graph.query(query_code) - - # -- New Net Computing - new_triple_list = [] - for selection in pattern_set: - - # -- Net Composition - property_net_0 = net.PropertyNet(graph, uri=selection.property_net_0) - property_net = net.PropertyNet(graph, uri=selection.property_net) - phenomena_net = net.PhenomenaNet(graph, uri=selection.phenomena_net) - composite_class_net = net.CompositeClassNet(graph) - composite_class_net.compose(property_net_0, property_net, phenomena_net) - - # -- Data Computation - composite_class_net.mother_class_net = property_net_0.uri - # etc - - # -- Restriction Computation - pass # TODO - # for num in range(1, 9+1): - # query_code = __op_pattern_1(selection.phenomena_net, num) - # op_set = graph.query(query_code) - # composite_class_net = __define_restriction(composite_class_net, op_set) - - # -- Relation Propagation - pass # TODO - # for (n1, rel, _) in class_net.input_relation_list: - # composite_class_net.add_input_relation(n1, rel) - # TODO: à voir si on veut d'autres relations - - # -- Net Naming - composite_class_net.naming = define_composite_naming_1(property_net_0, property_net, phenomena_net) - - # -- Finalization - composite_class_net.finalize() - new_triples = composite_class_net.generate_triple_definition() - new_triple_list += new_triples - - return rule_label, new_triple_list - - \ No newline at end of file diff --git a/tenet/febTransduction/uriref_computer.py b/tenet/febTransduction/rdfterm_computer.py similarity index 77% rename from tenet/febTransduction/uriref_computer.py rename to tenet/febTransduction/rdfterm_computer.py index 9eb1f0b0..64cf602e 100644 --- a/tenet/febTransduction/uriref_computer.py +++ b/tenet/febTransduction/rdfterm_computer.py @@ -15,7 +15,7 @@ from rdflib.term import _is_valid_uri #============================================================================== -# Main Method(s) +# Method to produce URIRef 
#============================================================================== def __create_uriref(uri): @@ -52,11 +52,17 @@ def produce_uriref(graph, uri): own validation checks. """ + uriref = None if uri is not None: - uri = __create_uriref(uri) - uri = __update_uri_with_prefix(graph, uri) - uri = __create_uriref(uri) - assert _is_valid_uri(uri), f'Houston, we have a problem: URI is not a valid uri' + uriref = __create_uriref(uri) + uriref = __update_uri_with_prefix(graph, uriref) + uriref = __create_uriref(uriref) + assert _is_valid_uri(uriref), f'Houston, we have a problem: URI is not a valid uri' - return uri - \ No newline at end of file + return uriref + + + +#============================================================================== +# Method to produce Litteral +#============================================================================== \ No newline at end of file diff --git a/tenet/tenet.log b/tenet/tenet.log index ef66b101..e69de29b 100644 --- a/tenet/tenet.log +++ b/tenet/tenet.log @@ -1,237 +0,0 @@ -- INFO - [TENET] Extraction Processing -- INFO - - === Process Initialization === -- INFO - -- Process Setting -- INFO - ----- Corpus source: /home/lamenji/Workspace/Tetras/tenet/tests/input/amrDocuments/dev/solar-system-01/ (amr) -- INFO - ----- Base output dir: /home/lamenji/Workspace/Tetras/tenet/tests/output/SolarSystemDev01-20230217/SolarSystemDev01_factoid.ttl -- INFO - ----- technical dir path: /home/lamenji/Workspace/Tetras/tenet/tests/output/SolarSystemDev01-20230217/technical-data/ -- INFO - ----- Ontology target (id): SolarSystemDev01 -- INFO - ----- Current path: /home/lamenji/Workspace/Tetras/tenet/tenet -- DEBUG - ----- Config file: /home/lamenji/Workspace/Tetras/tenet/tenet/config.xml -- DEBUG - - *** Config (Full Parameters) *** - -- Base Parameters - ----- config file: /home/lamenji/Workspace/Tetras/tenet/tenet/config.xml - ----- uuid: SolarSystemDev01 - ----- source corpus: /home/lamenji/Workspace/Tetras/tenet/tests/input/amrDocuments/dev/solar-system-01/ - ----- target reference: base - ----- process level: sentence - ----- source type: amr - -- Compositional Transduction Scheme (CTS) - ----- CTS reference: amr_scheme_1 - -- Directories - ----- base directory: ./ - ----- structure directory: ./structure/ - ----- CTS directory: ./scheme/ - ----- target frame directory: ./../input/targetFrameStructure/ - ----- input document directory: - ----- base output dir: /home/lamenji/Workspace/Tetras/tenet/tests/output/SolarSystemDev01-20230217/SolarSystemDev01_factoid.ttl - ----- output directory: /home/lamenji/Workspace/Tetras/tenet/tests/output/SolarSystemDev01-20230217/SolarSystemDev01_factoid.ttlSolarSystemDev01-20230217/ - ----- sentence output directory: /home/lamenji/Workspace/Tetras/tenet/tests/output/SolarSystemDev01-20230217/technical-data/ - ----- technical dir path: /home/lamenji/Workspace/Tetras/tenet/tests/output/SolarSystemDev01-20230217/technical-data/ - -- Config File Definition - ----- schema file: ./structure/amr-rdf-schema.ttl - ----- semantic net file: ./structure/semantic-net.ttl - ----- config param file: ./structure/config-parameters.ttl - ----- base ontology file: ./structure/base-ontology.ttl - ----- CTS file: ./scheme/amr_scheme_1.py - -- Useful References for Ontology - ----- base URI: https://tenet.tetras-libre.fr/working - ----- ontology suffix: -ontology.ttl - ----- ontology seed suffix: -ontology-seed.ttl - -- Source File Definition - ----- source sentence file: 
/home/lamenji/Workspace/Tetras/tenet/tests/input/amrDocuments/dev/solar-system-01/**/*.ttl - -- Target File Definition - ----- frame ontology file: ./../input/targetFrameStructure/base-ontology.ttl - ----- frame ontology seed file: ./../input/targetFrameStructure/base-ontology-seed.ttl - -- Output - ----- ontology namespace: https://tenet.tetras-libre.fr/base-ontology/ - ----- output file: /home/lamenji/Workspace/Tetras/tenet/tests/output/SolarSystemDev01-20230217/technical-data/SolarSystemDev01.ttl - *** - *** -- DEBUG - -- Counting number of graph files (sentences) -- INFO - ----- Number of Graphs: 1 -- INFO - - === Extraction Processing === -- INFO - *** sentence 1 *** -- INFO - -- Work Structure Preparation -- DEBUG - --- Graph Initialization -- DEBUG - ----- Configuration Loading -- DEBUG - -------- RDF Schema (302) -- DEBUG - -------- Semantic Net Definition (509) -- DEBUG - -------- Config Parameter Definition (543) -- DEBUG - ----- Frame Ontology Loading -- DEBUG - -------- Base Ontology produced as output (573) -- DEBUG - --- Source Data Import -- DEBUG - ----- Sentence Loading -- DEBUG - -------- /home/lamenji/Workspace/Tetras/tenet/tests/input/amrDocuments/dev/solar-system-01/SSC-01-01.stog.amr.ttl (621) -- DEBUG - --- Export work graph as turtle -- DEBUG - ----- Work graph file: /home/lamenji/Workspace/Tetras/tenet/tests/output/SolarSystemDev01-20230217/technical-data/SolarSystemDev01-1/SolarSystemDev01.ttl -- INFO - ----- Sentence (id): SSC-01-01 -- INFO - ----- Sentence (text): The Solar System is the gravitationally bound system of the Sun and the objects that orbit it, either directly or indirectly. -- INFO - -- Loading Extraction Scheme (amr_scheme_1) -- DEBUG - ----- Step number: 3 -- INFO - -- Loading Extraction Rules (amr_rule/*) -- DEBUG - ----- Total rule number: 87 -- INFO - -- Applying extraction step: preprocessing -- INFO - --- *** November Transduction *** Sequence: amrld-correcting-sequence -- INFO - ----- fix-amr-bug-about-system-solar-planet: 5/5 new triples (626, 0:00:00.048350) -- INFO - --- *** November Transduction *** Sequence: amr-reification-sequence -- INFO - ----- reclassify-concept-1: 10/10 new triples (636, 0:00:00.131841) -- DEBUG - ----- reclassify-concept-2: 0/0 new triple (636, 0:00:00.074151) -- INFO - ----- reclassify-concept-3: 12/12 new triples (648, 0:00:00.050096) -- INFO - ----- reclassify-concept-4: 16/16 new triples (664, 0:00:00.069943) -- INFO - ----- reclassify-concept-5: 2/4 new triples (666, 0:00:00.042364) -- INFO - ----- reify-roles-as-concept: 10/10 new triples (676, 0:00:00.054633) -- INFO - ----- reclassify-existing-variable: 45/45 new triples (721, 0:00:00.035203) -- INFO - ----- add-new-variable-for-reified-concept: 8/8 new triples (729, 0:00:00.063932) -- INFO - ----- add-amr-leaf-for-reclassified-concept: 33/33 new triples (762, 0:00:00.046697) -- INFO - ----- add-amr-leaf-for-reified-concept: 8/8 new triples (770, 0:00:00.062658) -- INFO - ----- add-amr-edge-for-core-relation: 27/27 new triples (797, 0:00:00.119510) -- INFO - ----- add-amr-edge-for-reified-concept: 12/12 new triples (809, 0:00:00.134834) -- INFO - ----- add-amr-edge-for-name-relation: 5/5 new triples (814, 0:00:00.060733) -- DEBUG - ----- add-value-for-quant-relation: 0/0 new triple (814, 0:00:00.069739) -- INFO - ----- add-amr-edge-for-polarity-relation: 5/5 new triples (819, 0:00:00.066134) -- INFO - ----- update-amr-edge-role-1: 15/15 new triples (834, 0:00:00.101905) -- INFO - ----- add-amr-root: 5/5 new triples (839, 0:00:00.024756) -- DEBUG - --- 
Serializing graph to SolarSystemDev01_preprocessing -- DEBUG - ----- step: preprocessing -- DEBUG - ----- id: SolarSystemDev01 -- DEBUG - ----- work_file: /home/lamenji/Workspace/Tetras/tenet/tests/output/SolarSystemDev01-20230217/technical-data/SolarSystemDev01-1/SolarSystemDev01_preprocessing.ttl -- DEBUG - ----- base: http://SolarSystemDev01/preprocessing -- INFO - ----- 218 triples extracted during preprocessing step -- INFO - -- Applying extraction step: transduction -- INFO - --- *** November Transduction *** Sequence: atomic-extraction-sequence -- INFO - ----- create-atom-class-net: 35/35 new triples (874, 0:00:00.066367) -- DEBUG - ----- (refinement) refine-cover-node-1: 5 new triples (879) -- DEBUG - ----- (refinement) refine-cover-node-2: 5 new triples (884) -- INFO - ----- create-individual-net-1: 10/10 new triples (894, 0:00:00.087596) -- DEBUG - ----- (refinement) refine-cover-node-1: 1 new triples (895) -- INFO - ----- create-atom-property-net-1: 88/88 new triples (983, 0:00:00.146487) -- DEBUG - ----- (refinement) refine-cover-node-1: 6 new triples (989) -- INFO - ----- create-value-net: 17/17 new triples (1006, 0:00:00.092233) -- INFO - ----- create-phenomena-net-1: 24/25 new triples (1030, 0:00:00.073087) -- DEBUG - ----- (refinement) refine-cover-node-1: 2 new triples (1032) -- INFO - --- *** November Transduction *** Sequence: atomic-extraction-sequence -- INFO - ----- create-atom-class-net: 1/49 new triple (1033, 0:00:00.080771) -- DEBUG - ----- create-individual-net-1: 0/10 new triple (1033, 0:00:00.052563) -- INFO - ----- create-atom-property-net-1: 1/95 new triple (1034, 0:00:00.154358) -- DEBUG - ----- create-value-net: 0/17 new triple (1034, 0:00:00.050105) -- DEBUG - ----- create-phenomena-net-1: 0/25 new triple (1034, 0:00:00.057190) -- INFO - --- *** November Transduction *** Sequence: phenomena-application-polarity-sequence -- INFO - ----- polarity-phenomena-application: 8/9 new triples (1042, 0:00:00.109839) -- DEBUG - ----- (refinement) refine-cover-node-1: 1 new triples (1043) -- INFO - --- *** November Transduction *** Sequence: phenomena-application-mod-sequence -- DEBUG - ----- mod-phenomena-application-1: 0/0 new triple (1043, 0:00:00.081777) -- DEBUG - ----- mod-phenomena-application-2: 0/0 new triple (1043, 0:00:00.038491) -- DEBUG - ----- mod-phenomena-application-3: 0/0 new triple (1043, 0:00:00.078474) -- INFO - --- *** November Transduction *** Sequence: phenomena-application-and-sequence -- INFO - ----- and-conjunction-phenomena-application-1: 14/17 new triples (1057, 0:00:00.170078) -- DEBUG - ----- (refinement) refine-cover-node-1: 1 new triples (1058) -- INFO - ----- and-conjunction-phenomena-application-2: 1/1 new triple (1059, 0:00:00.106857) -- INFO - ----- and-conjunction-phenomena-application-3: 14/14 new triples (1073, 0:00:00.129548) -- INFO - ----- and-conjunction-phenomena-application-4: 14/14 new triples (1087, 0:00:00.166570) -- DEBUG - ----- (refinement) refine-cover-node-2: 1 new triples (1088) -- INFO - ----- and-conjunction-phenomena-application-5: 6/9 new triples (1094, 0:00:00.062548) -- INFO - ----- and-conjunction-phenomena-application-6: 2/2 new triples (1096, 0:00:00.205600) -- INFO - --- *** January Transduction *** Sequence: "or" phenomena analysis 1 (targetting class) -- DEBUG - ----- new net construction: 0/0 new triple (1096, 0:00:00.056004) -- INFO - --- *** January Transduction *** Sequence: "or" phenomena analysis 2 (targetting property) -- INFO - ----- new net construction: 9/9 new triples (1105, 0:00:00.055170) -- 
INFO - --- *** February Transduction *** Sequence: phenomena_or_analyze_sequence -- DEBUG - ----- "or" phenomena analysis 1 [ property(class, or_phenomena) ]: 0/0 new triple (1105, 0:00:00.011168) -- INFO - ----- "or" phenomena analysis 2 [ property(property, or_phenomena) ]: 1/1 new triple (1106, 0:00:00.044370) -- INFO - --- *** November Transduction *** Sequence: phenomena-checking-sequence -- INFO - ----- expand-and-conjunction-phenomena-net: 8/8 new triples (1114, 0:00:00.014320) -- DEBUG - ----- (refinement) refine-cover-node-2: 1 new triples (1115) -- DEBUG - ----- expand-degree-phenomena-net-1: 0/0 new triple (1115, 0:00:00.008087) -- DEBUG - ----- expand-degree-phenomena-net-2: 0/0 new triple (1115, 0:00:00.011609) -- DEBUG - ----- expand-degree-phenomena-net-3: 0/0 new triple (1115, 0:00:00.008746) -- DEBUG - ----- expand-degree-phenomena-net-4: 0/0 new triple (1115, 0:00:00.007308) -- DEBUG - ----- expand-degree-phenomena-net-5: 0/0 new triple (1115, 0:00:00.007772) -- DEBUG - ----- expand-degree-phenomena-net-6: 0/0 new triple (1115, 0:00:00.007716) -- INFO - --- *** November Transduction *** Sequence: composite-property-extraction-sequence -- DEBUG - ----- create-composite-class-net-from-property-1: 0/0 new triple (1115, 0:00:00.087769) -- DEBUG - ----- create-composite-class-net-from-property-2: 0/0 new triple (1115, 0:00:00.224745) -- INFO - --- *** November Transduction *** Sequence: composite-class-extraction-sequence-1 -- INFO - ----- create-composite-class-net-from-property-1: 48/54 new triples (1163, 0:00:00.555766) -- DEBUG - ----- (refinement) refine-cover-node-1: 7 new triples (1170) -- DEBUG - ----- (refinement) refine-cover-node-2: 3 new triples (1173) -- DEBUG - ----- create-composite-class-net-from-property-2: 0/0 new triple (1173, 0:00:00.130480) -- INFO - ----- create-composite-class-net-from-property-3: 50/57 new triples (1223, 0:00:00.457728) -- INFO - --- *** November Transduction *** Sequence: composite-class-extraction-sequence-2 -- DEBUG - ----- create-composite-class-net-from-phenomena-1: 0/0 new triple (1223, 0:00:00.036293) -- DEBUG - ----- create-composite-class-net-from-phenomena-2: 0/0 new triple (1223, 0:00:00.056631) -- DEBUG - ----- create-composite-class-net-from-phenomena-3: 0/0 new triple (1223, 0:00:00.037681) -- DEBUG - ----- create-composite-class-net-from-phenomena-4: 0/0 new triple (1223, 0:00:00.048736) -- INFO - --- *** November Transduction *** Sequence: restriction-adding-sequence -- DEBUG - ----- add-restriction-to-class-net-from-property-1: 0/0 new triple (1223, 0:00:00.042469) -- INFO - --- *** November Transduction *** Sequence: classification-sequence -- INFO - ----- classify-net-from-core-1: 8/8 new triples (1231, 0:00:00.008805) -- INFO - ----- classify-net-from-core-2: 1/7 new triple (1232, 0:00:00.008024) -- DEBUG - ----- classify-net-from-core-3: 0/0 new triple (1232, 0:00:00.045640) -- DEBUG - ----- classify-net-from-part: 0/0 new triple (1232, 0:00:00.007531) -- INFO - ----- classify-net-from-domain: 9/9 new triples (1241, 0:00:00.008094) -- DEBUG - ----- classify-net-from-degree-phenomena-1: 0/0 new triple (1241, 0:00:00.010192) -- DEBUG - ----- classify-net-from-degree-phenomena-2: 0/0 new triple (1241, 0:00:00.038002) -- DEBUG - ----- classify-net-from-degree-phenomena-3: 0/0 new triple (1241, 0:00:00.006364) -- INFO - ----- propagate-individual-1: 1/1 new triple (1242, 0:00:00.006416) -- INFO - ----- propagate-individual-2: 5/5 new triples (1247, 0:00:00.008293) -- DEBUG - ----- reclassify-deprecated-net: 0/0 new 
triple (1247, 0:00:00.005420) -- DEBUG - --- Serializing graph to SolarSystemDev01_transduction -- DEBUG - ----- step: transduction -- DEBUG - ----- id: SolarSystemDev01 -- DEBUG - ----- work_file: /home/lamenji/Workspace/Tetras/tenet/tests/output/SolarSystemDev01-20230217/technical-data/SolarSystemDev01-1/SolarSystemDev01_transduction.ttl -- DEBUG - ----- base: http://SolarSystemDev01/transduction -- INFO - ----- 408 triples extracted during transduction step -- INFO - -- Applying extraction step: generation -- INFO - --- *** November Transduction *** Sequence: main-generation-sequence -- INFO - ----- compute-uri-for-owl-declaration-1: 8/8 new triples (1255, 0:00:00.030044) -- INFO - ----- compute-uri-for-owl-declaration-2: 1/4 new triple (1256, 0:00:00.019682) -- INFO - ----- compute-uri-for-owl-declaration-3: 1/1 new triple (1257, 0:00:00.049842) -- DEBUG - ----- compute-uri-for-owl-declaration-4: 0/0 new triple (1257, 0:00:00.018787) -- INFO - ----- compute-uri-for-owl-declaration-5: 6/6 new triples (1263, 0:00:00.019141) -- INFO - ----- compute-uri-for-owl-declaration-6: 5/5 new triples (1268, 0:00:00.020967) -- INFO - ----- generate-atom-class: 12/12 new triples (1280, 0:00:00.007831) -- INFO - ----- classify-atom-class-1: 4/4 new triples (1284, 0:00:00.006734) -- DEBUG - ----- classify-atom-class-2: 0/0 new triple (1284, 0:00:00.015968) -- INFO - ----- generate-individual: 3/3 new triples (1287, 0:00:00.008033) -- DEBUG - ----- classify-individual-1: 0/0 new triple (1287, 0:00:00.007648) -- INFO - ----- classify-individual-2: 4/4 new triples (1291, 0:00:00.008491) -- INFO - ----- generate-atom-property-1: 16/16 new triples (1307, 0:00:00.010658) -- INFO - ----- generate-atom-property-12: 8/16 new triples (1315, 0:00:00.011613) -- DEBUG - ----- generate-inverse-relation: 0/0 new triple (1315, 0:00:00.006359) -- INFO - ----- generate-composite-class: 18/18 new triples (1333, 0:00:00.008989) -- DEBUG - ----- add-restriction-to-class-1: 0/0 new triple (1333, 0:00:00.012511) -- DEBUG - ----- add-restriction-to-class-2: 0/0 new triple (1333, 0:00:00.012346) -- INFO - ----- add-restriction-to-class-3: 20/24 new triples (1353, 0:00:00.016476) -- DEBUG - ----- add-restriction-to-class-4: 0/0 new triple (1353, 0:00:00.010946) -- DEBUG - ----- add-restriction-to-class-5: 0/0 new triple (1353, 0:00:00.011763) -- DEBUG - ----- add-restriction-to-class-6: 0/0 new triple (1353, 0:00:00.010090) -- DEBUG - ----- generate-composite-property: 0/0 new triple (1353, 0:00:00.007500) -- DEBUG - --- Serializing graph to SolarSystemDev01_generation -- DEBUG - ----- step: generation -- DEBUG - ----- id: SolarSystemDev01 -- DEBUG - ----- work_file: /home/lamenji/Workspace/Tetras/tenet/tests/output/SolarSystemDev01-20230217/technical-data/SolarSystemDev01-1/SolarSystemDev01_generation.ttl -- DEBUG - ----- base: http://SolarSystemDev01/generation -- INFO - ----- 106 triples extracted during generation step -- INFO - -- Result: file containing only the factoids -- DEBUG - --- Making factoid graph with the last step result -- DEBUG - ----- Number of factoids: 121 -- DEBUG - ----- Graph base: http://SolarSystemDev01/factoid -- DEBUG - --- Serializing graph to factoid file (/home/lamenji/Workspace/Tetras/tenet/tests/output/SolarSystemDev01-20230217/technical-data/SolarSystemDev01-1/SolarSystemDev01_factoid.ttl) -- INFO - - === Final Ontology Generation === -- INFO - -- Making complete factoid graph by merging the result factoids -- INFO - ----- Total factoid number: 121 -- INFO - -- Serializing graph to factoid 
string -- INFO - ----- Graph base: http://SolarSystemDev01/factoid -- INFO - -- Serializing graph to factoid file -- INFO - ----- Ontology Turtle File: /home/lamenji/Workspace/Tetras/tenet/tests/output/SolarSystemDev01-20230217/SolarSystemDev01_factoid.ttl -- INFO - - === Done === -- INFO - - *** Execution Time *** ------ Function: create_ontology_from_amrld_dir (main) ------ Total Time: 0:00:07.492941 ------ Process Time: 0:00:07.446408 - *** - *** diff --git a/tests/test_rule.py b/tests/test_rule.py index 461bcb7e..f9a1fd9e 100644 --- a/tests/test_rule.py +++ b/tests/test_rule.py @@ -8,9 +8,9 @@ #============================================================================== import subprocess, os -from rdflib import Graph -from rdflib import Namespace -from rdflib.namespace import NamespaceManager +from rdflib import Graph, Namespace +from rdflib.namespace import NamespaceManager, FOAF, RDF +from rdflib import URIRef, Literal, BNode FILE_PATH = f'{os.path.dirname(os.path.abspath(__file__))}' INPUT_DIR_PATH = f'{FILE_PATH}/input/' @@ -18,7 +18,9 @@ OUTPUT_DIR_PATH = f'{FILE_PATH}/output/' TEST_GRAPH = f'{INPUT_DIR_PATH}testGraph1.ttl' from context import tenet -from tenet.febTransduction import phenomena_application_or as test_rule +from tenet.scheme.amr_rule.transduction import phenomena_application_or_1 as rule_1 +from tenet.scheme.amr_rule.transduction import phenomena_application_or_2 as rule_2 +from tenet.scheme import amr_rule as rule from tenet.febTransduction import query_builder from tenet.febTransduction import prefix_handle @@ -54,7 +56,7 @@ def define_clause_list(composition_pattern_list): def devtest_select_pattern_application_1(graph): print('\n -- Select Pattern Application 1') - query_code = test_rule.__select_pattern_1() + query_code = rule_1.__select_pattern_1() print(query_code) pattern_result_set = graph.query(query_code) print(f'\n ----- number of selection found: {len(pattern_result_set)}') @@ -68,7 +70,7 @@ def devtest_select_pattern_application_1(graph): def devtest_select_pattern_application_2(graph): print('\n -- Select Pattern Application 2') - query_code = test_rule.__select_pattern_2() + query_code = rule_2.__select_pattern_2() print(query_code) pattern_result_set = graph.query(query_code) print(f'\n ----- number of selection found: {len(pattern_result_set)}') @@ -89,6 +91,14 @@ def devtest_insert_query(graph): print(test_query) graph.update(test_query) print(f"----- Graph Updated ({len(graph)})") + + +def devtest_add_triple(graph, triple): + print(f'\n -- Adding triple in a graph') + print(f"----- Graph length before update: {len(graph)}") + print(f"----- Triple added: {triple}") + graph.add(triple) + print(f"----- Graph length after update: {len(graph)}") @@ -100,9 +110,10 @@ def unittest_run_rule(graph, rule): print('\n -- Rule Test') rule_label, new_triple_list = rule(graph) print(f' ----- label: {rule_label}') - print(f' ----- new_triple_list: {len(new_triple_list)}') + print(f' ----- new_triple_list ({len(new_triple_list)}):') for new_triple in new_triple_list: - print(f' | {new_triple}') + (s, p, o) = new_triple + print(f' | {s}, {p}, {o}') @@ -114,17 +125,21 @@ if __name__ == '__main__': print('\n *** Test Preparation ***') graph = load_test_graph() + uriref = URIRef('net:compositeClass_orbit_hasManner_conjunction-OR') + type_uriref = URIRef('net:Composite_Class_Net') + triple = (uriref, RDF.type, type_uriref) print('\n \n') print('\n *** Development Test ***') devtest_select_pattern_application_1(graph) devtest_select_pattern_application_2(graph) 
devtest_insert_query(graph) + devtest_add_triple(graph, triple) print('\n \n') print('\n *** Unit Test ***') - unittest_run_rule(graph, test_rule.analyze_phenomena_or_1) - unittest_run_rule(graph, test_rule.analyze_phenomena_or_2) + unittest_run_rule(graph, rule.analyze_phenomena_or_1) + unittest_run_rule(graph, rule.analyze_phenomena_or_2) print('\n \n') print('\n *** - ***') \ No newline at end of file diff --git a/tests/test_naming_computer.py b/tests/test_transduction_naming_computer.py similarity index 100% rename from tests/test_naming_computer.py rename to tests/test_transduction_naming_computer.py diff --git a/tests/test_transduction_uriref_computer.py b/tests/test_transduction_rdfterm_computer.py similarity index 97% rename from tests/test_transduction_uriref_computer.py rename to tests/test_transduction_rdfterm_computer.py index f376b1ee..1a063ea7 100644 --- a/tests/test_transduction_uriref_computer.py +++ b/tests/test_transduction_rdfterm_computer.py @@ -21,7 +21,7 @@ TEST_GRAPH = f'{INPUT_DIR_PATH}testGraph1.ttl' from context import tenet import utility -from tenet.febTransduction import uriref_computer +from tenet.febTransduction import rdfterm_computer @@ -89,7 +89,7 @@ def devtest_create_uriref(graph, uri): def unittest_produce_uriref(graph, uri): print(f'\n -- Produce valid URI Reference') try: - uriref = uriref_computer.produce_uriref(graph, uri) + uriref = rdfterm_computer.produce_uriref(graph, uri) if uriref is None: print(f' ----- None') else: # uriref is not None @@ -113,7 +113,7 @@ def unittest_produce_uriref(graph, uri): def unittest_failure_produce_uriref(graph, uri): print(f'\n -- * Failure to produce URI Reference *') try: - uriref = uriref_computer.produce_uriref(graph, uri) + uriref = rdfterm_computer.produce_uriref(graph, uri) print(f'Houston, we have a problem: production is successful') except: print(f' ----- OK FAILURE!') diff --git a/tests/test_semantic_net_1.py b/tests/test_transduction_semantic_net_1.py similarity index 100% rename from tests/test_semantic_net_1.py rename to tests/test_transduction_semantic_net_1.py diff --git a/tests/test_semantic_net_2.py b/tests/test_transduction_semantic_net_2.py similarity index 100% rename from tests/test_semantic_net_2.py rename to tests/test_transduction_semantic_net_2.py diff --git a/tests/test_semantic_net_3.py b/tests/test_transduction_semantic_net_3.py similarity index 100% rename from tests/test_semantic_net_3.py rename to tests/test_transduction_semantic_net_3.py diff --git a/tests/test_semantic_net_4.py b/tests/test_transduction_semantic_net_4.py similarity index 100% rename from tests/test_semantic_net_4.py rename to tests/test_transduction_semantic_net_4.py -- GitLab
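
Usage sketch for the renamed module (editorial addendum, not part of the patch). The relocated helper is called as `rdfterm_computer.produce_uriref(graph, uri)`, mirroring the calls visible in tests/test_transduction_rdfterm_computer.py and the `net:`-prefixed names used in tests/test_rule.py; the test-graph path, the sample input value, and the prefix-expansion behaviour (inferred from the internal `__update_uri_with_prefix` call) are assumptions, not statements about the commit.

    #!/usr/bin/python3
    # Minimal usage sketch (assumptions noted above); run from the tests/ directory.
    from rdflib import Graph

    from context import tenet                                # sys.path shim used by the existing tests
    from tenet.febTransduction import rdfterm_computer

    graph = Graph()
    graph.parse('input/testGraph1.ttl', format='turtle')     # TEST_GRAPH referenced in tests/test_rule.py

    # produce_uriref returns None for a None input; otherwise it builds an rdflib URIRef,
    # resolves prefixes against the graph, and asserts that the result is a valid URI.
    uriref = rdfterm_computer.produce_uriref(graph, 'net:Composite_Class_Net')
    print(uriref)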
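
The patch also appends an empty "Method to produce Litteral" (i.e. Literal) section header at the end of rdfterm_computer.py without any implementation. Purely as a hypothetical sketch of what such a helper could eventually look like, and not as part of this commit, a literal-producing counterpart might be:

    # Hypothetical sketch only: the commit adds just the section header, not this function.
    from rdflib import Literal

    def produce_literal(value, datatype=None, lang=None):
        """Wrap a plain Python value as an rdflib Literal; pass None through unchanged."""
        if value is None:
            return None
        # rdflib accepts either a datatype or a language tag for a Literal, not both.
        return Literal(value, datatype=datatype, lang=lang)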