#!/usr/bin/python3.5
# -*- coding: utf-8 -*-
#==============================================================================
# TENET: prepare work data
#------------------------------------------------------------------------------
# Prepare work data for extraction processing.
#==============================================================================
#==============================================================================
# Importing required modules
#==============================================================================
import glob
from rdflib import Graph, Namespace, URIRef
#==============================================================================
# Parameters
#==============================================================================
# Working directories
CONFIG_DIR = "config/"
FRAME_DIR = "frame/"
CORPUS_DIR = "corpus/"
CTS_DIR = "cts/"
OUTPUT_DIR = "output/"
# Config Definition
TURTLE_SUFFIX = ".ttl"
ONTO_FILE = "-ontology" + TURTLE_SUFFIX
dash_file = "dash-data-shapes.ttl" # data from "http://datashapes.org/dash.ttl"
schema_file = "unl-rdf-schema.ttl"
semantic_net_file = "semantic-net.ttl"
cts_file = "transduction-schemes.ttl"
c_param_file = "config-parameters.ttl"
# Dev Tests
base_uri = "https://unsel.tetras-libre.fr/tenet/working"
req_100 = "CCTP-SRSA-IP-20210831-R100/"
req_200 = "CCTP-SRSA-IP-20210831-R200/"
req_300 = "CCTP-SRSA-IP-20210831-R300/"
req_1100 = "CCTP-SRSA-IP-20210831-R1100/"
corpus_40 = "CCTP-SRSA-IP-20210831/"
corpus_ERTMS = "ERTMS/"
corpus_PEV = "PEV-RSE-Approach/"
#==============================================================================
# Utility
#==============================================================================
def read_query(cts_group, query_ref):
    """Read a CTS query file (cts/<group><ref>.cts) and return its content."""
    query_file = CTS_DIR + cts_group + str(query_ref) + ".cts"
    with open(query_file, "r") as file:
        return file.read()
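# Illustrative call (the group name and reference below are hypothetical,
# chosen only to show how the query path is built):
#   read_query("preprocessing/", 1)   # would read "cts/preprocessing/1.cts"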
#==============================================================================
# Graph Initialization
#==============================================================================
def load_config(work_graph):
    print("-- Configuration Loading:")

    file_ref = CONFIG_DIR + schema_file
    work_graph.parse(file_ref)
    print("----- RDF Schema (" + str(len(work_graph)) + ")")

    file_ref = CONFIG_DIR + semantic_net_file
    work_graph.parse(file_ref)
    print("----- Semantic Net Definition (" + str(len(work_graph)) + ")")

    file_ref = CONFIG_DIR + dash_file
    work_graph.parse(file_ref)
    print("----- Data Shapes Dash (" + str(len(work_graph)) + ")")

    file_ref = CONFIG_DIR + c_param_file
    work_graph.parse(file_ref)
    print("----- Config Parameter Definition (" + str(len(work_graph)) + ")")
def load_frame(work_graph, target_system):
    print("-- Frame Ontology Loading:")

    # -- old --- file_ref = FRAME_DIR + req_onto_file
    # -- old --- work_graph.parse(file_ref)
    # -- old --- print("----- Requirement Frame Ontology (" + str(len(work_graph)) + ")")

    file_ref = FRAME_DIR + target_system + ONTO_FILE
    work_graph.parse(file_ref)
    print("----- System Frame Ontology (" + str(len(work_graph)) + ")")

    # -- old --- file_ref = FRAME_DIR + f_param_file
    # -- old --- work_graph.parse(file_ref)
    # -- old --- print("----- Ontology Parameters (" + str(len(work_graph)) + ")")
#def define_namespace(work_graph):
#    print("-- Namespace Definition:")
#
#    sys_uri = "https://unsel.tetras-libre.fr/tenet/frame/system-ontology/"
#    concept_classes = ["agent"]
#    for concept in concept_classes:
#        new_prefix = "sys-" + concept
#        new_uri = URIRef(sys_uri + concept + '#')
#        work_graph.namespace_manager.bind(new_prefix, new_uri)
#        print("----- " + new_prefix + ": " + new_uri)
#    print(list(work_graph.namespace_manager.namespaces()))
def load_sentences(work_graph, corpus):
    """Load every turtle sentence file of the given corpus into the work graph."""
    print("-- Sentence Loading:")
    target_ref = CORPUS_DIR + corpus + '**/*.ttl'
    for file_ref in glob.glob(target_ref, recursive=True):
        work_graph.parse(file_ref)
        print("----- " + file_ref + " (" + str(len(work_graph)) + ")")
#==============================================================================
# CT Schemes for Transduction Process
#==============================================================================
def load_cts(work_graph):
    print("-- CTS Loading:")
    file_ref = CONFIG_DIR + cts_file
    work_graph.parse(file_ref)
    print("----- All Schemes (" + str(len(work_graph)) + ")")
#==============================================================================
# Result (export)
#==============================================================================
def export_result(work_graph, export_ref, export_file):
    print("-- Export result as turtle: " + export_file)
    work_graph.serialize(destination=export_file,
                         base=base_uri + '/' + export_ref,
                         format='turtle')
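# For example, the run(corpus_PEV, 'Corpus-PEV', ...) call at the bottom of
# this script exports to "output/Corpus-PEV.ttl", with the base URI
# <https://unsel.tetras-libre.fr/tenet/working/Corpus-PEV>.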
def finalize_export_file(export_file):
    """ finalize the export file by adding some useful prefixes """
    with open(export_file, "rt") as file:
        x = file.read()
    with open(export_file, "wt") as file:
        x = x.replace(
            "@prefix sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/> .",
            """
@prefix sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/> .
@prefix sys-Event: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/Event#> .
@prefix sys-State_Property: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/State_Property#> .
@prefix sys-abstract_thing: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/abstract_thing#> .
@prefix sys-action_verb: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/action_verb#> .
@prefix sys-agent: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/agent#> .
@prefix sys-attributive_verb: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/attributive_verb#> .
@prefix sys-component: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/component#> .
@prefix sys-message: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/message#> .
@prefix sys-place: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/place#> .
@prefix owl: <http://www.w3.org/2002/07/owl#> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix sys-relation: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/relation/> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
""")
        file.write(x)
#==============================================================================
# Main Function
#==============================================================================
def run(corpus, source_ref, target_ref):
    """Prepare work data for the given corpus and export the result as turtle."""
    try:
        print("[Tenet] Prepare work data from corpus " + corpus)

        print("\n" + "- Graph Initialization")
        work_graph = Graph()
        load_config(work_graph)
        load_frame(work_graph, target_ref)
        #define_namespace(work_graph)

        print("\n" + "- Preparation of Transduction Process")
        load_cts(work_graph)

        print("\n" + "- Data Source Imports")
        load_sentences(work_graph, corpus)

        print("\n" + "- Result")
        output_file = OUTPUT_DIR + source_ref + TURTLE_SUFFIX
        export_result(work_graph, source_ref, output_file)
        finalize_export_file(output_file)
        print()

    except Exception as error:
        print("!!! An exception occurred !!!")
        print(error)
#==============================================================================
# Execution
#==============================================================================
if __name__ == '__main__':

    # -- Data for Qivalio POC
    #target_ref = "environment"
    target_ref = "system"
    run(corpus_PEV, 'Corpus-PEV', target_ref)

    # -- Data for Master
    #target_ref = "system"
    #run(req_100, 'R100f', target_ref)
    #run(req_200, 'R200f', target_ref)
    #run(req_300, 'R300f', target_ref)
    #run(req_1100, 'R1100f', target_ref)
    #run(corpus_40, 'Corpus-CCTP-40f', target_ref)
    #run(corpus_ERTMS, 'Corpus-ERTMS', target_ref)
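# Usage note (assumption about the working layout, implied by the relative
# paths above but not confirmed elsewhere in this file): the script is meant
# to be launched from the directory containing the config/, frame/, corpus/,
# cts/ and output/ folders, e.g.:
#   python3 prepare_work_data.py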