Skip to content
Snippets Groups Projects
Commit 4a7cd0e0 authored by Aurélien Lamercerie's avatar Aurélien Lamercerie
Browse files

New transduction module (development in progress)

parent 5b70f8fa
No related branches found
No related tags found
No related merge requests found
Showing
with 959 additions and 1760 deletions
# -- Update System Path
import os, sys
LIB_PATH = os.path.dirname(os.path.abspath(__file__)) + '/'
print('Running in ' + LIB_PATH)
os.chdir(LIB_PATH)
sys.path.insert(0, os.path.abspath(LIB_PATH))
# -- Main Methods # -- Main Methods
from .main import create_ontology_from_amrld_file from main import create_ontology_from_amrld_file
from .main import create_ontology_from_amrld_dir from main import create_ontology_from_amrld_dir
#from .main import create_ontology_from_unlrdf_file #from .main import create_ontology_from_unlrdf_file
\ No newline at end of file
...@@ -44,7 +44,7 @@ RULE_STRING = """ *** Compositional Transduction Rule (CTR) *** ...@@ -44,7 +44,7 @@ RULE_STRING = """ *** Compositional Transduction Rule (CTR) ***
# Class # Class
#============================================================================== #==============================================================================
class Rule: class OldRule:
""" Class to define a Compositional Transduction Rule (CTR). """ Class to define a Compositional Transduction Rule (CTR).
""" """
......
...@@ -51,7 +51,7 @@ def get_rule_key_list_str(rule_key_list): ...@@ -51,7 +51,7 @@ def get_rule_key_list_str(rule_key_list):
# Class # Class
#============================================================================== #==============================================================================
class Sequence: class OldSequence:
""" Class to define a Transduction Sequence. """ Class to define a Transduction Sequence.
""" """
......
...@@ -24,8 +24,8 @@ import importlib.util ...@@ -24,8 +24,8 @@ import importlib.util
import importlib import importlib
from utility.timer import timed from utility.timer import timed
from extraction.rule import Rule from extraction.old_rule import OldRule
from extraction.sequence import Sequence from extraction.old_sequence import OldSequence
#============================================================================== #==============================================================================
...@@ -62,7 +62,7 @@ def get_new_rule_set(rule_def_set, prefix_list): ...@@ -62,7 +62,7 @@ def get_new_rule_set(rule_def_set, prefix_list):
rule_set = {} rule_set = {}
for rule_key, rule_def in rule_def_set.items(): for rule_key, rule_def in rule_def_set.items():
rule = Rule() rule = OldRule()
rule.load_dict(rule_def) rule.load_dict(rule_def)
rule.load_prefix_list(prefix_list) rule.load_prefix_list(prefix_list)
rule_set[rule_key] = rule rule_set[rule_key] = rule
...@@ -135,7 +135,7 @@ def load_rule_set(config, rule_dir, prefix_list): ...@@ -135,7 +135,7 @@ def load_rule_set(config, rule_dir, prefix_list):
#============================================================================== #==============================================================================
def _prepare_sequence(sequence_def, rule_set): def _prepare_sequence(sequence_def, rule_set):
sequence = Sequence() sequence = OldSequence()
sequence.load_sequence_from_dict(sequence_def) sequence.load_sequence_from_dict(sequence_def)
sequence.load_rule_list(rule_set) sequence.load_rule_list(rule_set)
return sequence return sequence
...@@ -278,7 +278,6 @@ def apply_step(config, graph, rule_set, step_name, step_sequence_def): ...@@ -278,7 +278,6 @@ def apply_step(config, graph, rule_set, step_name, step_sequence_def):
# Main Function # Main Function
#============================================================================== #==============================================================================
@timed
def apply(config, graph): def apply(config, graph):
""" Apply extraction process on the working graph """ """ Apply extraction process on the working graph """
......
...@@ -15,7 +15,7 @@ ...@@ -15,7 +15,7 @@
import glob import glob
import logging import logging
from rdflib import Graph from rdflib import Graph
import time, datetime #import time, datetime
#============================================================================== #==============================================================================
...@@ -160,7 +160,6 @@ def prepare_work_graph(config, sentence_file): ...@@ -160,7 +160,6 @@ def prepare_work_graph(config, sentence_file):
try: try:
logger.info("-- Work Structure Preparation") logger.info("-- Work Structure Preparation")
exec_start = time.perf_counter()
# -- Graph Initialization # -- Graph Initialization
logger.debug("--- Graph Initialization") logger.debug("--- Graph Initialization")
...@@ -188,13 +187,6 @@ def prepare_work_graph(config, sentence_file): ...@@ -188,13 +187,6 @@ def prepare_work_graph(config, sentence_file):
logger.info(f"----- Sentence (id): {graphId}") logger.info(f"----- Sentence (id): {graphId}")
logger.info(f"----- Sentence (text): {graphSentence}") logger.info(f"----- Sentence (text): {graphSentence}")
# -- Ending
exec_end = time.perf_counter()
exec_time = exec_end - exec_start
exec_time_date = datetime.timedelta(seconds=exec_time)
logger.debug("--- Ending Structure Preparation ")
logger.debug("----- Total Execution Time = " + str(exec_time_date))
return work_graph return work_graph
except: except:
......
...@@ -12,46 +12,41 @@ import shutil ...@@ -12,46 +12,41 @@ import shutil
from rdflib import Graph from rdflib import Graph
import logging.config import logging.config
# Set main file directory as reference
LIB_PATH = os.path.dirname(os.path.abspath(__file__)) + '/'
print('Running in ' + LIB_PATH)
os.chdir(LIB_PATH)
sys.path.insert(1, LIB_PATH)
from extraction import config, structure, process from extraction import config, structure, process
from utility.timer import timed from utility.timer import timed
# -- Config File Path
LIB_PATH = os.path.dirname(os.path.abspath(__file__)) + '/'
LOGGING_CONF_FILE_PATH = f'{LIB_PATH}logging.conf'
CONFIG_FILE_PATH = f'{LIB_PATH}config.xml'
# -- Logging # -- Logging
logging.config.fileConfig('logging.conf', disable_existing_loggers=False) logging.config.fileConfig(LOGGING_CONF_FILE_PATH, disable_existing_loggers=False)
logger = logging.getLogger('root') logger = logging.getLogger('root')
# Configuration
CONFIG_FILE = "config.xml"
#============================================================================== #==============================================================================
# Steps # Steps
#============================================================================== #==============================================================================
def set_config(args, technical_dir_path): def set_config(source_type, source_corpus, onto_prefix,
base_output_dir, technical_dir_path):
logger.info("-- Process Setting ") logger.info("-- Process Setting ")
logger.info("----- Corpus source: {0} ({1})".format(args['source_corpus'], logger.info(f'----- Corpus source: {source_corpus} ({source_type})')
args['source_type'])) logger.info(f'----- Base output dir: {base_output_dir}')
logger.info("----- Base output dir: {0}".format(args['base_output_dir']))
logger.info(f'----- technical dir path: {technical_dir_path}') logger.info(f'----- technical dir path: {technical_dir_path}')
logger.info("----- Ontology target (id): {0}".format(args['target_id'])) logger.info(f'----- Ontology target (id): {onto_prefix}')
logger.debug("----- Current path: {0}".format(os.getcwd())) logger.info(f'----- Current path: {os.getcwd()}')
logger.debug("----- Config file: {0}".format(CONFIG_FILE)) logger.debug(f'----- Config file: {CONFIG_FILE_PATH}')
process_config = config.Config(CONFIG_FILE, process_config = config.Config(CONFIG_FILE_PATH,
args['target_id'], onto_prefix,
args['source_corpus'], source_corpus,
#target_ontology, base_output_dir = base_output_dir,
base_output_dir = args['base_output_dir'],
technical_dir_path = technical_dir_path technical_dir_path = technical_dir_path
) )
process_config.source_type = args['source_type'] process_config.source_type = source_type
# config.output_ontology_namespace = target_ontology_namespace # config.output_ontology_namespace = target_ontology_namespace
logger.debug(process_config.get_full_config()) logger.debug(process_config.get_full_config())
...@@ -134,29 +129,24 @@ def create_ontology_from_amrld_file(amrld_file_path, ...@@ -134,29 +129,24 @@ def create_ontology_from_amrld_file(amrld_file_path,
logger.info('[TENET] Extraction Processing') logger.info('[TENET] Extraction Processing')
# -- Process Initialization # -- Process Initialization
logger.info(' === Process Initialization === ') logger.info('\n === Process Initialization === ')
logger.info('-- current dir: {0}'.format(os.getcwd())) if onto_prefix is None: onto_prefix = 'DefaultId'
if onto_prefix is None: onto_prefix = 'DefaultTargetId' config = set_config('amr', amrld_file_path, onto_prefix,
args = { out_file_path, technical_dir_path)
'source_type': 'amr',
'source_corpus': amrld_file_path,
'target_id': onto_prefix,
'base_output_dir': out_file_path}
config = set_config(args, technical_dir_path)
init_process(config) init_process(config)
# -- Extraction Processing # -- Extraction Processing
logger.info(' === Extraction Processing === ') logger.info('\n === Extraction Processing === ')
config.sentence_output_dir = f'-0' config.sentence_output_dir = f'-0'
result_triple_list = apply_extraction(config, amrld_file_path) result_triple_list = apply_extraction(config, amrld_file_path)
# -- Final Ontology Generation (factoid_graph) # -- Final Ontology Generation (factoid_graph)
logger.info(' === Final Ontology Generation === ') logger.info('\n === Final Ontology Generation === ')
factoid_graph = generate_final_ontology(result_triple_list) factoid_graph = generate_final_ontology(result_triple_list)
ontology_turtle_string = serialize_factoid_graph(config, factoid_graph, out_file_path) ontology_turtle_string = serialize_factoid_graph(config, factoid_graph, out_file_path)
# -- Done # -- Done
logger.info(' === Done === ') logger.info('\n === Done === ')
if config.technical_dir_path is not None: if config.technical_dir_path is not None:
log_file_name = 'tenet.log' log_file_name = 'tenet.log'
dest_file_path = f'{config.technical_dir_path}{log_file_name}' dest_file_path = f'{config.technical_dir_path}{log_file_name}'
...@@ -193,19 +183,14 @@ def create_ontology_from_amrld_dir(amrld_dir_path, ...@@ -193,19 +183,14 @@ def create_ontology_from_amrld_dir(amrld_dir_path,
logger.info('[TENET] Extraction Processing') logger.info('[TENET] Extraction Processing')
# -- Process Initialization # -- Process Initialization
logger.info(' === Process Initialization === ') logger.info('\n === Process Initialization === ')
logger.info('-- current dir: {0}'.format(os.getcwd())) if onto_prefix is None: onto_prefix = 'DefaultId'
if onto_prefix is None: onto_prefix = 'DefaultTargetId' config = set_config('amr', amrld_dir_path, onto_prefix,
args = { out_file_path, technical_dir_path)
'source_type': 'amr',
'source_corpus': amrld_dir_path,
'target_id': onto_prefix,
'base_output_dir': out_file_path}
config = set_config(args, technical_dir_path)
init_process(config) init_process(config)
# -- Extraction Processing # -- Extraction Processing
logger.info(' === Extraction Processing === ') logger.info('\n === Extraction Processing === ')
sentence_dir = config.source_sentence_file sentence_dir = config.source_sentence_file
sentence_count = 0 sentence_count = 0
result_triple_list = [] result_triple_list = []
...@@ -217,12 +202,12 @@ def create_ontology_from_amrld_dir(amrld_dir_path, ...@@ -217,12 +202,12 @@ def create_ontology_from_amrld_dir(amrld_dir_path,
result_triple_list.extend(new_triple_list) result_triple_list.extend(new_triple_list)
# -- Final Ontology Generation (factoid_graph) # -- Final Ontology Generation (factoid_graph)
logger.info(' === Final Ontology Generation === ') logger.info('\n === Final Ontology Generation === ')
factoid_graph = generate_final_ontology(result_triple_list) factoid_graph = generate_final_ontology(result_triple_list)
ontology_turtle_string = serialize_factoid_graph(config, factoid_graph, out_file_path) ontology_turtle_string = serialize_factoid_graph(config, factoid_graph, out_file_path)
# -- Done # -- Done
logger.info(' === Done === ') logger.info('\n === Done === ')
if config.technical_dir_path is not None: if config.technical_dir_path is not None:
log_file_name = 'tenet.log' log_file_name = 'tenet.log'
dest_file_path = f'{config.technical_dir_path}{log_file_name}' dest_file_path = f'{config.technical_dir_path}{log_file_name}'
......
...@@ -23,50 +23,6 @@ from query_builder.element.net import Net ...@@ -23,50 +23,6 @@ from query_builder.element.net import Net
rule_set = {} rule_set = {}
# ---------------------------------------------
# Net Expansion: Polarity Phenomena Application
# ---------------------------------------------
rule_set['polarity-phenomena-application'] = {
'label': "polarity-phenomena-application",
'comment': ("Apply polarity phenomena to: ",
" (1) polarity relation on a property class. "),
'construction': f"""
# !!! TODO: restriction for negative polarity !!!
{composite_property_net.construct(base_node=atom_property_net.base_node,
property_name='?newPropertyName')}
{composite_property_net.propagate_relations()}
{atom_property_net.deprecate()}
""",
'clause': f"""
# *** Identify Polarity(p0:Property, 'negative') ***
{atom_property_net.identify_net(core_role='true')}
{atom_property_net.identify_relation('amr:role_polarity', value_net.id)}
{value_net.identify_net(value_label="'negative'")}
# *** Identify variable label of base leaf ***
{atom_property_net.base_node} a amr:AMR_Leaf ;
amr:hasVariable ?variable.
?variable amr:label ?varLabel.
{composite_property_net.complete_clauses_for_construction(atom_property_net.base_node)}
""",
'binding': f"""
# -- New Property Name
BIND (CONCAT('not', '-', {atom_property_net.property_name}) AS ?newPropertyName).
{composite_property_net.bind_uri('?newPropertyName', '?varLabel')}
"""
}
# --------------------------------------------- # ---------------------------------------------
# Net Expansion: Conjunction Phenomena Net # Net Expansion: Conjunction Phenomena Net
# --------------------------------------------- # ---------------------------------------------
...@@ -271,15 +227,27 @@ rule_set['and-conjunction-phenomena-application-6'] = { ...@@ -271,15 +227,27 @@ rule_set['and-conjunction-phenomena-application-6'] = {
# --------------------------------------------- # ---------------------------------------------
# Net Expansion: Degree Phenomena Net # *** Under Development *** New Rule
# --------------------------------------------- # ---------------------------------------------
# TODO # -- Useful query builder(s)
# * TODO * property_net = PropertyNet()
# * TODO * class_net_0 = ClassNet(0)
# * TODO * phenomena_net = PhenomenaNet()
# * TODO * and_composite_class_net = AndCompositeClassNet()
def analyze_phenomena_and():
rule = 'NewRule()' # !!! TODO !!!
# -- {composite_class_net.propagate_relations()} # rule.clause(f'{property_net.identify_net(core_role="true")}')
# rule.clause(f'{property_net.id} amr:role_ARG0 {class_net_0.id}.')
# rule.clause(f'{property_net.id} amr:role_ARG1 {phenomena_net.id}.')
# rule.clause(f'{phenomena_net.identify_net(phenomena_type='amr:phenomena_conjunction_and')}')
# -- {logical_set_net.deprecate()} # rule.operate(f'{and_composite_class_net.construct(*TODO*)}')
# *** Complete clauses for net construction *** # * TODO * add restrictions for and_composite_class_net --op--> class_net
# -- {composite_class_net.complete_clauses_for_construction(class_net_0.base_node)}
# * TODO * rule.operate(and_composite_class_net.complete_clauses_for_construction())
return rule
#!/usr/bin/python3.10
# -*-coding:Utf-8 -*
#==============================================================================
# TENET: AMR CTR at 'Net Expansion' level
#------------------------------------------------------------------------------
# Module grouping compositional transduction rule_sets (CTR) for the analysis of
# AMR structures, at 'Net Expansion' level
#==============================================================================
#==============================================================================
# Query Builder Imports
#==============================================================================
#import query_builder.construct as construct
from query_builder.builders import *
from query_builder.element.net import Net
#==============================================================================
# CTR Set (CTR as Sparql querries)
#==============================================================================
# Registry of Compositional Transduction Rules (CTR) for this module
# (still empty: the degree phenomena rules below are not implemented yet).
rule_set = {}

# ---------------------------------------------
# Net Expansion: Degree Phenomena Net
# ---------------------------------------------
# TODO: degree phenomena rule — sketched query fragments kept for reference:
# -- {composite_class_net.propagate_relations()}
# -- {logical_set_net.deprecate()}
# *** Complete clauses for net construction ***
# -- {composite_class_net.complete_clauses_for_construction(class_net_0.base_node)}
#!/usr/bin/python3.10
# -*-coding:Utf-8 -*
#==============================================================================
# TENET: AMR CTR at 'Net Expansion' level
#------------------------------------------------------------------------------
# Module grouping compositional transduction rule_sets (CTR) for the analysis
# of AMR structures, at 'Net Expansion' level
#==============================================================================
if __name__ == '__main__':
import os, sys
LIB_PATH = f'{os.path.dirname(os.path.abspath(__file__))}/../../..'
sys.path.insert(0, os.path.abspath(LIB_PATH))
print(sys.path[0])
import transduction
import transduction.net as net
#==============================================================================
# Rule to analyze conjunction phenomena
#==============================================================================
# -- Net Instantiations
# Query-builder nets shared by the rule below: property_net is the pivot,
# linked to its first argument (class_net_0) and to the conjunction
# phenomena (phenomena_net); class_net_1 is instantiated but not used
# by analyze_phenomena_or.
property_net = net.PropertyNet()
class_net_0 = net.ClassNet(0)
class_net_1 = net.ClassNet(1)
phenomena_net = net.PhenomenaNet()
or_composite_class_net = net.OrCompositeClassNet()
def analyze_phenomena_or():
    """Build the transduction rule detecting an 'or' conjunction phenomena.

    The rule composes class_net_0, property_net and phenomena_net into the
    or-composite class net, after registering two composition patterns.
    """
    rule = transduction.Rule(or_composite_class_net)
    composition_patterns = (('amr:role_ARG0', class_net_0),
                            ('amr:role_ARG1', phenomena_net))
    for relation, target_net in composition_patterns:
        rule.add_composition_pattern(property_net, relation, target_net)
    rule.compose(class_net_0, property_net, phenomena_net)
    return rule
#==============================================================================
# Development Test
#==============================================================================
if __name__ == '__main__':
    print('\n' + ' *** Development Test ***')
    print('\n -- Rule')
    rule = analyze_phenomena_or()
    # Display every SPARQL query produced by the rule, numbered from 1.
    for num, query in enumerate(rule.query_list, start=1):
        print(f'*** query {num} ***\n{query}\n')
\ No newline at end of file
#!/usr/bin/python3.10
# -*-coding:Utf-8 -*
#==============================================================================
# TENET: AMR CTR at 'Net Expansion' level
#------------------------------------------------------------------------------
# Module grouping compositional transduction rule_sets (CTR) for the analysis of
# AMR structures, at 'Net Expansion' level
#==============================================================================
#==============================================================================
# Query Builder Imports
#==============================================================================
#import query_builder.construct as construct
from query_builder.builders import *
from query_builder.element.net import Net
#==============================================================================
# CTR Set (CTR as Sparql querries)
#==============================================================================
rule_set = {}
# ---------------------------------------------
# Net Expansion: Polarity Phenomena Application
# ---------------------------------------------
rule_set['polarity-phenomena-application'] = {
'label': "polarity-phenomena-application",
'comment': ("Apply polarity phenomena to: ",
" (1) polarity relation on a property class. "),
'construction': f"""
# !!! TODO: restriction for negative polarity !!!
{composite_property_net.construct(base_node=atom_property_net.base_node,
property_name='?newPropertyName')}
{composite_property_net.propagate_relations()}
{atom_property_net.deprecate()}
""",
'clause': f"""
# *** Identify Polarity(p0:Property, 'negative') ***
{atom_property_net.identify_net(core_role='true')}
{atom_property_net.identify_relation('amr:role_polarity', value_net.id)}
{value_net.identify_net(value_label="'negative'")}
# *** Identify variable label of base leaf ***
{atom_property_net.base_node} a amr:AMR_Leaf ;
amr:hasVariable ?variable.
?variable amr:label ?varLabel.
{composite_property_net.complete_clauses_for_construction(atom_property_net.base_node)}
""",
'binding': f"""
# -- New Property Name
BIND (CONCAT('not', '-', {atom_property_net.property_name}) AS ?newPropertyName).
{composite_property_net.bind_uri('?newPropertyName', '?varLabel')}
"""
}
This diff is collapsed.
from .rule import Rule
from .net import Net
from .class_net import ClassNet
from .atom_class_net import AtomClassNet
from .composite_class_net import CompositeClassNet
from .or_composite_class_net import OrCompositeClassNet
# from .and_composite_class_net import AndCompositeClassNet
from .property_net import PropertyNet
from .atom_property_net import AtomPropertyNet
from .composite_property_net import CompositePropertyNet
from .individual_net import IndividualNet
from .phenomena_net import PhenomenaNet
#!/usr/bin/python3.10
# -*-coding:Utf-8 -*
#==============================================================================
# TENET: Atom Class Net Query Builder
#------------------------------------------------------------------------------
# Class to generate SPARQL query parts related to semantic nets
#==============================================================================
#==============================================================================
# Importing required modules
#==============================================================================
from .class_net import ClassNet
#==============================================================================
# Net Class
#==============================================================================
class AtomClassNet(ClassNet):
    """ Class to generate SPARQL query parts related to semantic nets.

    Specializes ClassNet with the 'atomClass' / net:Atom_Class_Net
    signature.
    """

    #--------------------------------------------------------------------------
    # Constructor(s)
    #--------------------------------------------------------------------------

    def __init__(self, num=''):
        # -- Net Signature (installed before delegating to the parent init)
        self.type_name = 'atomClass'
        self.type_id = 'Atom_Class_Net'
        self.type_uri = f'net:{self.type_id}'
        self.id = f'?{self.type_name}Net{num}'
        # -- Parent init (signature=True: do not overwrite the signature)
        super().__init__(signature=True)
\ No newline at end of file
#!/usr/bin/python3.10
# -*-coding:Utf-8 -*
#==============================================================================
# TENET: Atom Property Net Query Builder
#------------------------------------------------------------------------------
# Class to generate SPARQL query parts related to semantic nets
#==============================================================================
#==============================================================================
# Importing required modules
#==============================================================================
from .property_net import PropertyNet
#==============================================================================
# Net Class
#==============================================================================
class AtomPropertyNet(PropertyNet):
    """ Class to generate SPARQL query parts related to semantic nets.

    Specializes PropertyNet with the 'atomProperty' signature and the
    extra attributes of an atom property net.
    """

    #--------------------------------------------------------------------------
    # Constructor(s)
    #--------------------------------------------------------------------------

    def __init__(self, num=''):
        # -- Net Signature
        self.type_name = 'atomProperty'
        self.type_id = 'Atom_Property_Net'
        self.id = f'?{self.type_name}Net{num}'
        self.type_uri = f'net:{self.type_id}'

        # -- Parent init
        super().__init__(signature=True)

        # -- Net Attributes: one SPARQL variable per attribute
        net_id = self.id
        self.core_role = net_id + 'CoreRole'
        self.target_argument_node = net_id + 'TargetArgumentNode'
        self.property_type = net_id + 'PropertyType'
        #self.property_name = net_id + 'PropertyName'
        self.property_name01 = net_id + 'PropertyName01'
        self.property_name10 = net_id + 'PropertyName10'
        self.property_name12 = net_id + 'PropertyName12'

        # -- Predicate-table entries for the new attributes
        self.predicate_table.update(
            core_role='isCoreRoleLinked',
            target_argument_node='targetArgumentNode',
            property_type='hasPropertyType',
            #property_name='hasPropertyName',
            property_name01='hasPropertyName01',
            property_name10='hasPropertyName10',
            property_name12='hasPropertyName12',
        )
\ No newline at end of file
#!/usr/bin/python3.10
# -*-coding:Utf-8 -*
#==============================================================================
# TENET: Class Net Query Builder
#------------------------------------------------------------------------------
# Class to generate SPARQL query parts related to semantic nets
#==============================================================================
#==============================================================================
# Importing required modules
#==============================================================================
from .net import Net
#==============================================================================
# Net Class
#==============================================================================
class ClassNet(Net):
    """ Class to generate SPARQL query parts related to semantic nets.

    Base class net: installs the generic 'class' / net:Class_Net signature
    unless a subclass already defined its own (signature=True).
    """

    #--------------------------------------------------------------------------
    # Constructor(s)
    #--------------------------------------------------------------------------

    def __init__(self, num='', signature=False):
        # -- Net Signature (only when no subclass provided one already)
        if not signature:
            self.type_name = 'class'
            self.type_id = 'Class_Net'
            self.id = f'?{self.type_name}Net{num}'
            self.type_uri = f'net:{self.type_id}'

        # -- Parent init
        super().__init__(signature=True)

        # -- Net Attributes
        self.class_name = self.id + 'ClassName'
        self.predicate_table.update({'class_name': 'hasClassName'})
#==============================================================================
# Development Test
#==============================================================================

if __name__ == '__main__':

    print('\n' + ' *** Development Test ***')

    print('\n' + ' -- test: Net')
    # Fix: this module defines ClassNet; AtomClassNet lives in
    # atom_class_net.py and is not imported here, so the original
    # `net = AtomClassNet()` raised NameError.
    net = ClassNet()
    print(net)

    print('\n' + ' -- test: update a test query')
    # Sample query assembling the CONSTRUCT / clause / binding parts
    # produced by the net (the {{...}} pairs render literal braces).
    test_query = f"""[...]
    CONSTRUCT {{
        {net.construct(base_node='?node1',
                       class_name='system')}
    }}
    WHERE {{
        clause_1
        clause_2
        {net.complete_clauses_for_construction('?node1')}
        {net.bind_uri('{{node1.concept_label}}',
                      '{{node1.variable_label}}')}
    }}
    """
    print(test_query)

    print('\n' + ' *** - ***')
\ No newline at end of file
#!/usr/bin/python3.10
# -*-coding:Utf-8 -*
#==============================================================================
# TENET: Atom Class Net Query Builder
#------------------------------------------------------------------------------
# Class to generate SPARQL query parts related to semantic nets
#==============================================================================
#==============================================================================
# Importing required modules
#==============================================================================
from .class_net import ClassNet
#==============================================================================
# Net Class
#==============================================================================
class CompositeClassNet(ClassNet):
    """ Class to generate SPARQL query parts related to semantic nets.

    Specializes ClassNet with the 'compositeClass' signature plus the
    attributes describing a composite class (covered node, mother class
    net, restrictions).
    """

    #--------------------------------------------------------------------------
    # Constructor(s)
    #--------------------------------------------------------------------------

    def __init__(self, num=''):
        # -- Net Signature
        self.type_name = 'compositeClass'
        self.type_id = 'Composite_Class_Net'
        self.id = f'?{self.type_name}Net{num}'
        self.type_uri = f'net:{self.type_id}'

        # -- Parent init
        super().__init__(signature=True)

        # -- Net Attributes: one SPARQL variable per attribute
        net_id = self.id
        self.node = net_id + 'Node'
        self.mother_class_net = net_id + 'MotherClassNet'
        self.restriction = net_id + 'Restriction'
        self.restriction01 = net_id + 'Restriction01'

        # -- Predicate-table entries for the new attributes
        self.predicate_table.update(
            node='coverNode',
            mother_class_net='hasMotherClassNet',
            restriction='hasRestriction',
            restriction01='hasRestriction01',
        )
\ No newline at end of file
#!/usr/bin/python3.10
# -*-coding:Utf-8 -*
#==============================================================================
# TENET: Composite Property Net Query Builder
#------------------------------------------------------------------------------
# Class to generate SPARQL query parts related to semantic nets
#==============================================================================
#==============================================================================
# Importing required modules
#==============================================================================
from .property_net import PropertyNet
#==============================================================================
# Net Class
#==============================================================================
class CompositePropertyNet(PropertyNet):
    """ Class to generate SPARQL query parts related to semantic nets.

    Specializes PropertyNet with the 'compositeProperty' signature and
    the attributes of a composite property net.
    """

    #--------------------------------------------------------------------------
    # Constructor(s)
    #--------------------------------------------------------------------------

    def __init__(self, num=''):
        # -- Net Signature
        self.type_name = 'compositeProperty'
        self.type_id = 'Composite_Property_Net'
        self.id = f'?{self.type_name}Net{num}'
        self.type_uri = f'net:{self.type_id}'

        # -- Parent init
        super().__init__(signature=True)

        # -- Net Attributes: one SPARQL variable per attribute
        net_id = self.id
        self.core_role = net_id + 'CoreRole'
        self.target_argument_node = net_id + 'TargetArgumentNode'
        self.property_type = net_id + 'PropertyType'
        self.restriction = net_id + 'Restriction'

        # -- Predicate-table entries for the new attributes
        self.predicate_table.update(
            core_role='isCoreRoleLinked',
            target_argument_node='targetArgumentNode',
            property_type='hasPropertyType',
            restriction='hasRestriction',
        )
#==============================================================================
# Development Test
#==============================================================================

if __name__ == '__main__':

    print('\n' + ' *** Development Test ***')

    print('\n' + ' -- test: Net')
    # Fix: this module defines CompositePropertyNet; AtomClassNet lives in
    # atom_class_net.py and is not imported here, so the original
    # `net = AtomClassNet()` raised NameError.
    net = CompositePropertyNet()
    print(net)

    print('\n' + ' -- test: update a test query')
    # NOTE(review): the original passed class_name='system', but class_name
    # is a ClassNet attribute; property_type is the analogous attribute on a
    # property net — confirm against Net.construct's attribute handling.
    test_query = f"""[...]
    CONSTRUCT {{
        {net.construct(base_node='?node1',
                       property_type='system')}
    }}
    WHERE {{
        clause_1
        clause_2
        {net.complete_clauses_for_construction('?node1')}
        {net.bind_uri('{{node1.concept_label}}',
                      '{{node1.variable_label}}')}
    }}
    """
    print(test_query)

    print('\n' + ' *** - ***')
\ No newline at end of file
#!/usr/bin/python3.10
# -*-coding:Utf-8 -*
#==============================================================================
# TENET: Individual Net Query Builder
#------------------------------------------------------------------------------
# Class to generate SPARQL query parts related to semantic nets
#==============================================================================
#==============================================================================
# Importing required modules
#==============================================================================
from .net import Net
#==============================================================================
# Net Class
#==============================================================================
class IndividualNet(Net):
    """ Class to generate SPARQL query parts related to semantic nets.

    Specializes Net for 'individual' nets: an individual linked to a
    mother class net, with a base class name and an individual label.
    """

    #--------------------------------------------------------------------------
    # Constructor(s)
    #--------------------------------------------------------------------------

    def __init__(self, num=''):
        """ Initialize the net signature and its specific attributes.

        num -- optional suffix distinguishing several nets of the same
               type inside one query (default: '').
        """
        # -- Net Signature
        self.type_name = 'individual'
        self.type_id = 'Individual_Net'
        self.id = f'?{self.type_name}Net{num}'
        self.type_uri = f'net:{self.type_id}'
        # -- Parent init
        super().__init__(signature=True)
        # -- Net Attributes
        # Fix: 'base_class_name' previously reused the 'MotherClassNet'
        # variable suffix (copy-paste), making it indistinguishable from
        # 'mother_class_net' in the generated SPARQL; the predicate table
        # below shows the two attributes are meant to be distinct.
        self.base_class_name = f'{self.id}BaseClassName'
        self.mother_class_net = f'{self.id}MotherClassNet'
        self.individual_label = f'{self.id}IndividualLabel'
        self.predicate_table.update({
            'base_class_name': 'hasBaseClassName',
            'mother_class_net': 'hasMotherClassName',
            'individual_label': 'hasIndividualLabel'
        })

    #--------------------------------------------------------------------------
    # Method(s) to build 'Construct' parts
    #--------------------------------------------------------------------------

    def construct(self, **net_attribute):
        """ Build the CONSTRUCT triples for this net (delegates to Net). """
        query_code = super().construct(**net_attribute)
        return query_code

    #--------------------------------------------------------------------------
    # Method(s) to build 'Clause' parts
    #--------------------------------------------------------------------------

    # --

    #--------------------------------------------------------------------------
    # Method(s) to build 'Binding' parts
    #--------------------------------------------------------------------------

    def bind_uri(self, net_name='nameless', node_reference='00'):
        """ Build the BIND part producing the net URI (delegates to Net). """
        return super().bind_uri(net_name, node_reference)
#==============================================================================
# Development Test
#==============================================================================

if __name__ == '__main__':
    # Ad-hoc manual test: run this module directly to print a sample net
    # and a sample CONSTRUCT query assembled from its query parts.
    print('\n' + ' *** Development Test ***')

    # Fix: the banner said ' -- test: Atom Class Net' (copy-paste from the
    # atom-class module) while the class under test is IndividualNet.
    print('\n' + ' -- test: Individual Net')
    net = IndividualNet()
    print(net)

    print('\n' + ' -- test: update a test query')
    # Sample query skeleton: '{{' renders as a literal '{' in the
    # generated SPARQL.
    test_query = f"""[...]
    CONSTRUCT {{
        {net.construct(base_node='?node1',
                       mother_class_net='?classNet',
                       individual_label='?valueLabel')}
    }}
    WHERE {{
        clause_1
        clause_2
        {net.complete_clauses_for_construction('?node1')}
        {net.bind_uri('{{node1.concept_label}}',
                      '{{node1.variable_label}}')}
    }}
    """
    print(test_query)

    print('\n' + ' *** - ***')
\ No newline at end of file
#!/usr/bin/python3.10
# -*-coding:Utf-8 -*
#==============================================================================
# TENET: Logical Set Net Query Builder
#------------------------------------------------------------------------------
# Class to generate SPARQL query parts related to semantic nets
#==============================================================================
#==============================================================================
# Importing required modules
#==============================================================================
from .net import Net
#==============================================================================
# Net Class
#==============================================================================
class LogicalSetNet(Net):
    """ Class to generate SPARQL query parts related to semantic nets.

    Specializes Net for 'logicalSet' nets: a property net bound to
    several content nets under a logical constraint, with a computed
    naming and an optional restriction.
    """

    #--------------------------------------------------------------------------
    # Constructor(s)
    #--------------------------------------------------------------------------

    def __init__(self, num=''):
        """ Initialize the net signature and its specific attributes.

        num -- optional suffix distinguishing several nets of the same
               type inside one query (default: '').
        """
        # -- Net Signature
        self.type_name = 'logicalSet'
        self.type_id = 'Logical_Set_Net'
        self.id = f'?{self.type_name}Net{num}'
        self.type_uri = f'net:{self.type_id}'
        # -- Parent init
        super().__init__(signature=True)
        # -- Net Attributes
        # Fix: 'logical_constraint' previously reused the 'Restriction'
        # variable suffix (copy-paste), making it indistinguishable from
        # 'restriction' in the generated SPARQL; the predicate table
        # below shows the two attributes are meant to be distinct.
        self.logical_constraint = f'{self.id}LogicalConstraint'
        self.property_net = f'{self.id}PropertyNet'
        self.content_net_1 = f'{self.id}ContentNet1'
        self.content_net_2 = f'{self.id}ContentNet2'
        self.content_net = f'{self.id}ContentNet'
        self.naming = f'{self.id}Naming'
        self.restriction = f'{self.id}Restriction'
        self.predicate_table.update({
            'logical_constraint': 'hasLogicalConstraint',
            'property_net': 'bindPropertyNet',
            'content_net_1': 'containsNet1',
            'content_net_2': 'containsNet2',
            'content_net': 'containsNet',
            'naming': 'hasNaming',
            'restriction': 'bindRestriction'
        })

    #--------------------------------------------------------------------------
    # Private data accessor(s)
    #--------------------------------------------------------------------------

    def __get_predicate(self, attribute_reference):
        """ Return the 'net:'-prefixed predicate mapped to an attribute key. """
        predicate_reference = self.predicate_table[f'{attribute_reference}']
        return f'net:{predicate_reference}'

    #--------------------------------------------------------------------------
    # Method(s) to build 'Construct' parts
    #--------------------------------------------------------------------------

    def define_naming(self):
        """ CONSTRUCT triple attaching the naming variable to this net. """
        predicate = self.__get_predicate('naming')
        return f"""
            # -- Naming Definition
            {self.id} {predicate} {self.naming}."""

    #--------------------------------------------------------------------------
    # Method(s) to build 'Clause' parts
    #--------------------------------------------------------------------------

    def identify_content_net_1(self, target_id):
        """ Clause linking this net to its first content net. """
        predicate = self.__get_predicate('content_net_1')
        return f"""
            # -- Identify content net 1
            {self.id} {predicate} {target_id}."""

    def identify_content_net_2(self, target_id):
        """ Clause linking this net to its second content net. """
        predicate = self.__get_predicate('content_net_2')
        return f"""
            # -- Identify content net 2
            {self.id} {predicate} {target_id}."""

    def identify_content_net(self, target_id):
        """ Clause linking this net to any content net. """
        predicate = self.__get_predicate('content_net')
        return f"""
            # -- Identify content net
            {self.id} {predicate} {target_id}."""

    def identify_disjoint_content_net(self, target_id_1, target_id_2):
        """ Clause linking this net to two distinct content nets.

        Fix: the original body interpolated an undefined name
        'predicate' (NameError when called) although it fetched
        'predicate1' and 'predicate2'; each target now uses the
        predicate fetched for it.
        """
        predicate_1 = self.__get_predicate('content_net_1')
        predicate_2 = self.__get_predicate('content_net_2')
        return f"""
            # -- Identify disjoint content net
            {self.id} {predicate_1} {target_id_1}.
            {self.id} {predicate_2} {target_id_2}.
            FILTER ( {target_id_1} != {target_id_2} )."""

    def identify_content_net_number(self, number_ref):
        """ Sub-select counting the content nets bound to this net. """
        predicate = self.__get_predicate('content_net')
        return f"""
            {{
                # -- Identify content net number
                SELECT (COUNT(?contentNet) AS {number_ref})
                WHERE {{
                    {self.id} {predicate} ?contentNet.
                }}
            }}"""

    def identify_restriction(self, target_id):
        """ Clause linking this net to a restriction net. """
        predicate = self.__get_predicate('restriction')
        return f"""
            # -- Identify restriction
            {self.id} {predicate} {target_id}."""

    #--------------------------------------------------------------------------
    # Method(s) to build 'Binding' parts
    #--------------------------------------------------------------------------

    def bind_naming(self, property_ref, content_1_ref, content_2_ref):
        """ BIND sequence computing the net naming from its components.

        Concatenates property and content references; appends '-etc'
        when the net holds more than two content nets.
        """
        ref1 = f"{self.id}Ref1"
        ref2 = f"{self.id}Ref2"
        # -- ref3 = f"{self.id}Ref3"
        return f"""
            # -- New Naming
            {self.identify_content_net_number('?contentNumber')}
            BIND (CONCAT({property_ref}, '-', {content_1_ref}) AS {ref1}).
            BIND (CONCAT({ref1}, '-and-', {content_2_ref}) AS {ref2}).
            BIND (IF (?contentNumber > 2, CONCAT({ref2}, '-etc'), {ref2}) AS {self.naming})."""
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment