Skip to content
Snippets Groups Projects
Commit 4cfa39bd authored by David Rouquet's avatar David Rouquet
Browse files

Version avec verif pour les phrases venant d'UNseL Inter

parent d2a232c9
No related branches found
No related tags found
No related merge requests found
%% Cell type:code id:160ab8ab-091e-4b00-b63a-c3746c71c540 tags:

``` python
import panel as pn import panel as pn
import param import param
from glob import glob from glob import glob
import base64 import base64
import re import re
import requests import requests
from subprocess import Popen, PIPE, STDOUT from subprocess import Popen, PIPE, STDOUT
import html import html
from cairosvg import svg2png from cairosvg import svg2png
import graphviz import graphviz
from rdflib import Graph, Namespace, URIRef from rdflib import Graph, Namespace, URIRef
import os import os
from collections import OrderedDict from collections import OrderedDict
from urllib.request import urlopen from urllib.request import urlopen
import uuid import uuid
import shutil import shutil
from SPARQLWrapper import SPARQLWrapper, JSON from SPARQLWrapper import SPARQLWrapper, JSON
pn.extension(comms='ipywidgets') pn.extension(comms='ipywidgets')
```

%% Cell type:code id:7b4685d7-698d-4a86-a0a4-a81d337bc9d7 tags:

``` python
####################################################################################################### #######################################################################################################
# Parameters # Parameters
####################################################################################################### #######################################################################################################
shaclBase = '/opt/dashboards/tools/shacl-1.3.2/bin/' shaclBase = '/opt/dashboards/tools/shacl-1.3.2/bin/'
owl2vowlPath = '/opt/dashboards/tools/owl2vowl_0.3.7/owl2vowl.jar' owl2vowlPath = '/opt/dashboards/tools/owl2vowl_0.3.7/owl2vowl.jar'
storeBase = '../store/CCTP-SRSA-IP-20210831/' storeBase = '../store/CCTP-SRSA-IP-20210831/'
extractionGraph = '/opt/dashboards/tetras-lab-unl-demos/work_graph.ttl' # -- old --- extractionGraph = '/opt/dashboards/tetras-lab-unl-demos/extraction-data-9.ttl' extractionGraph = '/opt/dashboards/tetras-lab-unl-demos/work_graph.ttl' # -- old --- extractionGraph = '/opt/dashboards/tetras-lab-unl-demos/extraction-data-9.ttl'
workDir = 'work-data/' workDir = 'work-data/'
webvowlData = '/opt/webvowl' webvowlData = '/opt/webvowl'
pane_width = 1300 pane_width = 1300
# Fuseki # Fuseki
fusekiBase = "https://fuseki.unsel.tetras-lab.io/unsel/" fusekiBase = "https://fuseki.unsel.tetras-lab.io/unsel/"
sparqlQuery = SPARQLWrapper(fusekiBase+'query',returnFormat=JSON) sparqlQuery = SPARQLWrapper(fusekiBase+'query',returnFormat=JSON)
sparqlUpdate = SPARQLWrapper(fusekiBase+'update') sparqlUpdate = SPARQLWrapper(fusekiBase+'update')
sparqlUpdate.method = 'POST' sparqlUpdate.method = 'POST'
sparqlUpdate.setCredentials("admin", "wezW3EHTH4LfEdaKtnC9errLH1YwVXssIO6DUfnjGAHuBApSfvDb4R1uDX5JmSVK") sparqlUpdate.setCredentials("admin", "wezW3EHTH4LfEdaKtnC9errLH1YwVXssIO6DUfnjGAHuBApSfvDb4R1uDX5JmSVK")
sparqlUpdate.setQuery("DELETE {?s ?p ?o} WHERE {?s ?p ?o}") sparqlUpdate.setQuery("DELETE {?s ?p ?o} WHERE {?s ?p ?o}")
sparqlLog = sparqlUpdate.query() sparqlLog = sparqlUpdate.query()
```

%% Cell type:code id:2c41c319-4beb-4a85-a232-61a12d00cdbf tags:

``` python
####################################################################################################### #######################################################################################################
# UNL tools functions # UNL tools functions
####################################################################################################### #######################################################################################################
def unl2stuff(unlFilePath, jarPath, outPrefix, outType): def unl2stuff(unlFilePath, jarPath, outPrefix, outType):
# Run java parser # Run java parser
cmd = ['java', '-jar', jarPath, cmd = ['java', '-jar', jarPath,
'--input-file', unlFilePath, '--input-file', unlFilePath,
'--output-file', outPrefix, '--output-file', outPrefix,
'--output-type', outType] '--output-type', outType]
with Popen(cmd, stdout=PIPE, stderr=STDOUT) as p: with Popen(cmd, stdout=PIPE, stderr=STDOUT) as p:
p.wait() p.wait()
p.stdout.flush() p.stdout.flush()
if p.returncode != 0: if p.returncode != 0:
print("Error in unl2rdf: \n\n"+p.stdout.read().decode()) print("Error in unl2rdf: \n\n"+p.stdout.read().decode())
print('UNL;') print('UNL;')
#print(text) #print(text)
def unl2dotWeb(unldata) : def unl2dotWeb(unldata) :
data={'unl': unldata, 'outputs':['dot', 'svg', 'rdf']} data={'unl': unldata, 'outputs':['dot', 'svg', 'rdf']}
try: try:
r = requests.post('https://unl.demo.tetras-libre.fr/unl2rdf', data=data) r = requests.post('https://unl.demo.tetras-libre.fr/unl2rdf', data=data)
except Exception as e: except Exception as e:
return 'Error calling https://unl.demo.tetras-libre.fr/unl2rdf : "{error}"'.format(error=e) return 'Error calling https://unl.demo.tetras-libre.fr/unl2rdf : "{error}"'.format(error=e)
html=r.text html=r.text
# On utilise une regex au lieu de parser le html car ce dernier est mal formé # On utilise une regex au lieu de parser le html car ce dernier est mal formé
regexSvg = re.compile('<svg.*svg>',re.MULTILINE|re.DOTALL) regexSvg = re.compile('<svg.*svg>',re.MULTILINE|re.DOTALL)
regexRdf = re.compile("<code id='rdf' class='collapse show'>(.*?)</code>",re.MULTILINE|re.DOTALL) regexRdf = re.compile("<code id='rdf' class='collapse show'>(.*?)</code>",re.MULTILINE|re.DOTALL)
try : try :
svg = regexSvg.search(html).group() svg = regexSvg.search(html).group()
rdf = regexRdf.search(html).group(1) rdf = regexRdf.search(html).group(1)
except Exception as e : except Exception as e :
svg = '' svg = ''
rdf = '' rdf = ''
print(e) print(e)
return(svg, rdf) return(svg, rdf)
def zipdir(path, ziph): def zipdir(path, ziph):
# ziph is zipfile handle # ziph is zipfile handle
for root, dirs, files in os.walk(path): for root, dirs, files in os.walk(path):
for file in files: for file in files:
if not('orig' in root): if not('orig' in root):
ziph.write(os.path.join(root, file), ziph.write(os.path.join(root, file),
os.path.relpath(os.path.join(root, file), os.path.relpath(os.path.join(root, file),
os.path.join(path, '..'))) os.path.join(path, '..')))
def addBaseUri(rdfStr): def addBaseUri(rdfStr):
regexBaseUri = re.compile("http://rdf-unl.org.*?sentence.*?ontology") regexBaseUri = re.compile("http://rdf-unl.org.*?sentence.*?ontology")
baseUri = regexBaseUri.search(rdfStr).group() baseUri = regexBaseUri.search(rdfStr).group()
rdfStr = "# baseURI: "+baseUri+"\n"+rdfStr rdfStr = "# baseURI: "+baseUri+"\n"+rdfStr
return(rdfStr) return(rdfStr)
def postEditRdf(rdfPath, frStr, enStr): def postEditRdf(rdfPath, frStr, enStr):
textID = rdfPath.rsplit('/', 1)[0] textID = rdfPath.rsplit('/', 1)[0]
newPrefix = "http://unsel.rdf-unl.org/"+textID newPrefix = "http://unsel.rdf-unl.org/"+textID
with open(rdfPath,'r') as rdfFile : with open(rdfPath,'r') as rdfFile :
rdfStr = rdfFile.read() rdfStr = rdfFile.read()
rdfFile.close() rdfFile.close()
regexBaseUri = re.compile("http://rdf-unl.org.*?sentence.*?ontology") regexBaseUri = re.compile("http://rdf-unl.org.*?sentence.*?ontology")
rdfStr = rdfStr.replace('rdfs:label "TBD : phrase en langue naturelle"@inv ;', rdfStr = rdfStr.replace('rdfs:label "TBD : phrase en langue naturelle"@inv ;',
'<https://unl.tetras-libre.fr/rdf/schema#has_id> "{}" ;\n'.format(textID.split('/')[-2])+'rdfs:label """{}"""@fr ;\n'.format(frStr)+' rdfs:label """{}"""@en ;\n'.format(enStr)) '<https://unl.tetras-libre.fr/rdf/schema#has_id> "{}" ;\n'.format(textID.split('/')[-2])+'rdfs:label """{}"""@fr ;\n'.format(frStr)+' rdfs:label """{}"""@en ;\n'.format(enStr))
baseUri = regexBaseUri.search(rdfStr).group() baseUri = regexBaseUri.search(rdfStr).group()
oldPrefix = baseUri.rsplit('/', 1)[0] oldPrefix = baseUri.rsplit('/', 1)[0]
rdfStr = rdfStr.replace(oldPrefix+'#ontology', newPrefix.rsplit('/', 1)[0]+'#ontology') rdfStr = rdfStr.replace(oldPrefix+'#ontology', newPrefix.rsplit('/', 1)[0]+'#ontology')
rdfStr = rdfStr.replace(oldPrefix+'#', "http://unsel.rdf-unl.org/uw_lexeme#") rdfStr = rdfStr.replace(oldPrefix+'#', "http://unsel.rdf-unl.org/uw_lexeme#")
rdfStr = "# baseURI: "+baseUri+"\n @prefix : <"+baseUri.replace("ontology","")+"> .\n"+rdfStr rdfStr = "# baseURI: "+baseUri+"\n @prefix : <"+baseUri.replace("ontology","")+"> .\n"+rdfStr
rdfStr = rdfStr.replace(oldPrefix, newPrefix) rdfStr = rdfStr.replace(oldPrefix, newPrefix)
with open(rdfPath,'w') as rdfFile : with open(rdfPath,'w') as rdfFile :
rdfStr = rdfFile.write(rdfStr) rdfStr = rdfFile.write(rdfStr)
rdfFile.close() rdfFile.close()
def replaceInplace(filePath, searchText, replaceText): def replaceInplace(filePath, searchText, replaceText):
#read input file #read input file
fin = open(filePath, "rt") fin = open(filePath, "rt")
#read file contents to string #read file contents to string
data = fin.read() data = fin.read()
#replace all occurrences of the required string #replace all occurrences of the required string
data = data.replace(searchText, replaceText) data = data.replace(searchText, replaceText)
#close the input file #close the input file
fin.close() fin.close()
#open the input file in write mode #open the input file in write mode
fin = open(filePath, "wt") fin = open(filePath, "wt")
#overrite the input file with the resulting data #overrite the input file with the resulting data
fin.write(data) fin.write(data)
#close the file #close the file
fin.close() fin.close()
def createStoreDirectory(unlStr, srsaRef): def createStoreDirectory(unlStr, srsaRef):
storeDir = storeBase+srsaRef+"/current/" storeDir = storeBase+srsaRef+"/current/"
regexFr = re.compile("{org:fr}\n(.*?)\n{/org}",re.MULTILINE|re.DOTALL) regexFr = re.compile("{org:fr}\n(.*?)\n{/org}",re.MULTILINE|re.DOTALL)
try: try:
frStr = regexFr.search(unlStr).group(1) frStr = regexFr.search(unlStr).group(1)
except AttributeError: except AttributeError:
frStr = '' frStr = ''
enStr = '' enStr = ''
# Create a directory named after 'Référence' # Create a directory named after 'Référence'
try : try :
os.makedirs(storeDir) os.makedirs(storeDir)
except FileExistsError: except FileExistsError:
pass pass
# Add english translation to unl code # Add english translation to unl code
unlStr = unlStr.replace("{/org}", "{{/org}}\n{{en}}\n{enStr}\n{{/en}}".format(enStr=enStr)) unlStr = unlStr.replace("{/org}", "{{/org}}\n{{en}}\n{enStr}\n{{/en}}".format(enStr=enStr))
# Write UNL code to a file # Write UNL code to a file
with open(storeDir+srsaRef+'.unl','w') as unlFile: with open(storeDir+srsaRef+'.unl','w') as unlFile:
unlFile.write(unlStr) unlFile.write(unlStr)
os.chmod(storeDir+srsaRef+'.unl',0o766) os.chmod(storeDir+srsaRef+'.unl',0o766)
# Send UNL code to https://unl.demo.tetras-libre.fr/unl2rdf to get SVG and RDF # Send UNL code to https://unl.demo.tetras-libre.fr/unl2rdf to get SVG and RDF
#svg, rdf = unl2dotWeb(unlStr) #svg, rdf = unl2dotWeb(unlStr)
# Use unltools jar to create ttl and dot file from unl # Use unltools jar to create ttl and dot file from unl
unl2stuff(storeDir+srsaRef+'.unl', '/opt/dashboards/tools/unl2rdf-app-0.9.jar', storeDir+srsaRef, 'rdf') unl2stuff(storeDir+srsaRef+'.unl', '/opt/dashboards/tools/unl2rdf-app-0.9.jar', storeDir+srsaRef, 'rdf')
postEditRdf(storeDir+srsaRef+'.ttl', frStr, enStr) postEditRdf(storeDir+srsaRef+'.ttl', frStr, enStr)
unl2stuff(storeDir+srsaRef+'.unl', '/opt/dashboards/tools/unl2rdf-app-0.9.jar', storeDir+srsaRef, 'dot') unl2stuff(storeDir+srsaRef+'.unl', '/opt/dashboards/tools/unl2rdf-app-0.9.jar', storeDir+srsaRef, 'dot')
# Generate svg and png # Generate svg and png
graphviz.render('dot', 'svg', storeDir+srsaRef+'.dot') graphviz.render('dot', 'svg', storeDir+srsaRef+'.dot')
graphviz.render('dot', 'png', storeDir+srsaRef+'.dot') graphviz.render('dot', 'png', storeDir+srsaRef+'.dot')
# Rename generated svg and png so they are not named like file.dot.svg # Rename generated svg and png so they are not named like file.dot.svg
svgList = glob(storeDir+srsaRef+"*.svg") svgList = glob(storeDir+srsaRef+"*.svg")
for svgPath in svgList: for svgPath in svgList:
svgNewPath = svgPath.replace(".dot","") svgNewPath = svgPath.replace(".dot","")
os.rename(svgPath, svgNewPath) os.rename(svgPath, svgNewPath)
pngList = glob(storeDir+srsaRef+"*.png") pngList = glob(storeDir+srsaRef+"*.png")
for pngPath in pngList: for pngPath in pngList:
pngNewPath = pngPath.replace(".dot","") pngNewPath = pngPath.replace(".dot","")
os.rename(pngPath, pngNewPath) os.rename(pngPath, pngNewPath)
# Add full text sentences to the svg # Add full text sentences to the svg
replaceInplace(storeDir+srsaRef+'.svg', '</svg>','''<text x="0" y="-40">fr : {fr}</text> replaceInplace(storeDir+srsaRef+'.svg', '</svg>','''<text x="0" y="-40">fr : {fr}</text>
<text x="0" y="-20">en : {en}</text> <text x="0" y="-20">en : {en}</text>
</svg>'''.format(fr=frStr, en=enStr)) </svg>'''.format(fr=frStr, en=enStr))
#svgWithTxt = svg.replace('</svg>','''<text x="0" y="-40">fr : {fr}</text> #svgWithTxt = svg.replace('</svg>','''<text x="0" y="-40">fr : {fr}</text>
#<text x="0" y="-20">en : {en}</text> #<text x="0" y="-20">en : {en}</text>
#</svg>'''.format(fr=frStr, en=enStr)) #</svg>'''.format(fr=frStr, en=enStr))
#with open(storeDir+srsaRef+'.svg','w') as svgFile: #with open(storeDir+srsaRef+'.svg','w') as svgFile:
# svgFile.write(svgWithTxt) # svgFile.write(svgWithTxt)
#os.chmod(storeDir+srsaRef+'.svg',0o766) #os.chmod(storeDir+srsaRef+'.svg',0o766)
#with open(storeDir+srsaRef+'.ttl','w') as rdfFile: #with open(storeDir+srsaRef+'.ttl','w') as rdfFile:
# rdfFile.write(rdf) # rdfFile.write(rdf)
os.chmod(storeDir+srsaRef+'.ttl',0o766) os.chmod(storeDir+srsaRef+'.ttl',0o766)
os.chmod(storeDir+srsaRef+'.svg',0o766) os.chmod(storeDir+srsaRef+'.svg',0o766)
os.chmod(storeDir+srsaRef+'.png',0o766) os.chmod(storeDir+srsaRef+'.png',0o766)
os.chmod(storeDir+srsaRef+'.dot',0o766) os.chmod(storeDir+srsaRef+'.dot',0o766)
os.chmod(storeDir+srsaRef+'.unl',0o766) os.chmod(storeDir+srsaRef+'.unl',0o766)
# Convert svg to png and write to a file # Convert svg to png and write to a file
#try: #try:
# svg2png(bytestring=svgWithTxt, write_to=storeDir+srsaRef+'.png') # svg2png(bytestring=svgWithTxt, write_to=storeDir+srsaRef+'.png')
#except : #except :
# pass # pass
shutil.copytree(storeDir, storeBase+srsaRef+"/orig/") shutil.copytree(storeDir, storeBase+srsaRef+"/orig/")
with open(storeBase+srsaRef+"/current/"+srsaRef+'.comments','w') as commentFile: with open(storeBase+srsaRef+"/current/"+srsaRef+'.comments','w') as commentFile:
commentFile.write("[David] : Saisissez vos commentaires en commençant par votre nom, n'oubliez pas d'enregistrer : ") commentFile.write("[David] : Saisissez vos commentaires en commençant par votre nom, n'oubliez pas d'enregistrer : ")
os.chmod(storeBase+srsaRef+"/current/"+srsaRef+'.comments',0o766) os.chmod(storeBase+srsaRef+"/current/"+srsaRef+'.comments',0o766)
def writeUnlFiles(unlStr, storePrefix): def writeUnlFiles(unlStr, storePrefix):
srsaRef = selectDir.value srsaRef = selectDir.value
with open(storePrefix+'.unl','w') as unlFile: with open(storePrefix+'.unl','w') as unlFile:
unlFile.write(unlStr) unlFile.write(unlStr)
unlFile.close() unlFile.close()
def createFolderFromUnselInter(srsaRef): def createFolderFromUnselInter(srsaRef):
url = 'https://lingwarium.org/UNseL-inter/GrapheUNL.txt' url = 'https://lingwarium.org/UNseL-inter/GrapheUNL.txt'
unlStr = urlopen(url).read().decode('utf-8').replace('[P:1]','').replace('[/P]','') unlStr = urlopen(url).read().decode('utf-8').replace('[P:1]','').replace('[/P]','')
createStoreDirectory(unlStr, srsaRef) createStoreDirectory(unlStr, srsaRef)
```

%% Cell type:code id:7b32d69a-52fb-4b9d-8cd9-5fb45c177284 tags:

``` python
####################################################################################################### #######################################################################################################
# Extraction # Extraction
####################################################################################################### #######################################################################################################
def run_command(cmd): def run_command(cmd):
with Popen(cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True) as p: with Popen(cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True) as p:
p.poll() p.poll()
p.stdout.flush() p.stdout.flush()
p.stderr.flush() p.stderr.flush()
stdout, stderr = p.communicate() stdout, stderr = p.communicate()
return p.returncode, stdout, stderr return p.returncode, stdout, stderr
# Run SHACL infer sh script. mode argument can take the values 'infer' or 'validate' # Run SHACL infer sh script. mode argument can take the values 'infer' or 'validate'
def shaclInfer(ttlPath, mode, ttlRulesPath = ''): def shaclInfer(ttlPath, mode, ttlRulesPath = ''):
if ttlRulesPath == '': if ttlRulesPath == '':
cmd = ['sh', shaclBase+'/shacl'+mode+'.sh', '-datafile', ttlPath] cmd = ['sh', shaclBase+'/shacl'+mode+'.sh', '-datafile', ttlPath]
else: else:
cmd = ['sh', shaclBase+'/shacl'+mode+'.sh', '-datafile', ttlPath, '-shapesfile', ttlRulesPath] cmd = ['sh', shaclBase+'/shacl'+mode+'.sh', '-datafile', ttlPath, '-shapesfile', ttlRulesPath]
#cmd = ' '.join(cmd) #cmd = ' '.join(cmd)
#!{cmd} #!{cmd}
code, out, err = run_command(cmd) code, out, err = run_command(cmd)
if code != 0: if code != 0:
print("Error in SHACL script: \n\n"+err) print("Error in SHACL script: \n\n"+err)
else: else:
return(out) return(out)
def export_result(g): def export_result(g):
export_file = 'output.ttl' export_file = 'output.ttl'
g.serialize(destination=export_file, base=base_uri, format='turtle') g.serialize(destination=export_file, base=base_uri, format='turtle')
#shaclInfer('/opt/dashboards/tetras-lab-unl-demos/demo-cctp-40.ttl', 'infer') #shaclInfer('/opt/dashboards/tetras-lab-unl-demos/demo-cctp-40.ttl', 'infer')
```

%% Cell type:code id:5c7164d7-c074-4aa3-9776-0cc5cc8f18f7 tags:

``` python
#============================================================================== #==============================================================================
# TENET: prepare work data # TENET: prepare work data
#------------------------------------------------------------------------------ #------------------------------------------------------------------------------
# Prepare work data for extraction processing. # Prepare work data for extraction processing.
#============================================================================== #==============================================================================
#============================================================================== #==============================================================================
# Parameters # Parameters
#============================================================================== #==============================================================================
# Working directories # Working directories
CONFIG_DIR = "/opt/dashboards/tools/tenet/config/" CONFIG_DIR = "/opt/dashboards/tools/tenet/config/"
FRAME_DIR = "/opt/dashboards/tools/tenet/frame/" FRAME_DIR = "/opt/dashboards/tools/tenet/frame/"
CORPUS_DIR = storeBase CORPUS_DIR = storeBase
OUTPUT_DIR = "output/" OUTPUT_DIR = "output/"
# Config Definition # Config Definition
TURTLE_SUFFIX = ".ttl" TURTLE_SUFFIX = ".ttl"
frame_file = "system-ontology.ttl" frame_file = "system-ontology.ttl"
dash_file = "dash-data-shapes.ttl" # data from "http://datashapes.org/dash.ttl" dash_file = "dash-data-shapes.ttl" # data from "http://datashapes.org/dash.ttl"
schema_file = "unl-rdf-schema.ttl" schema_file = "unl-rdf-schema.ttl"
semantic_net_file = "semantic-net.ttl" semantic_net_file = "semantic-net.ttl"
cts_file = "transduction-schemes.ttl" cts_file = "transduction-schemes.ttl"
c_param_file = "config-parameters.ttl" c_param_file = "config-parameters.ttl"
# Dev Tests # Dev Tests
base_uri = "https://unsel.tetras-libre.fr/tenet/working" base_uri = "https://unsel.tetras-libre.fr/tenet/working"
#============================================================================== #==============================================================================
# Graph Initialization # Graph Initialization
#============================================================================== #==============================================================================
def load_config(work_graph): def load_config(work_graph):
file_ref = CONFIG_DIR + schema_file file_ref = CONFIG_DIR + schema_file
work_graph.parse(file_ref) work_graph.parse(file_ref)
file_ref = CONFIG_DIR + semantic_net_file file_ref = CONFIG_DIR + semantic_net_file
work_graph.parse(file_ref) work_graph.parse(file_ref)
file_ref = CONFIG_DIR + dash_file file_ref = CONFIG_DIR + dash_file
work_graph.parse(file_ref) work_graph.parse(file_ref)
file_ref = CONFIG_DIR + c_param_file file_ref = CONFIG_DIR + c_param_file
work_graph.parse(file_ref) work_graph.parse(file_ref)
def load_frame(work_graph): def load_frame(work_graph):
file_ref = FRAME_DIR + frame_file file_ref = FRAME_DIR + frame_file
work_graph.parse(file_ref) work_graph.parse(file_ref)
#def define_namespace(work_graph): #def define_namespace(work_graph):
# print("-- Namespace Definition:") # print("-- Namespace Definition:")
# #
# sys_uri = "https://unsel.tetras-libre.fr/tenet/frame/system-ontology/" # sys_uri = "https://unsel.tetras-libre.fr/tenet/frame/system-ontology/"
# concept_classes = ["agent"] # concept_classes = ["agent"]
# for concept in concept_classes: # for concept in concept_classes:
# new_prefix = "sys-" + concept # new_prefix = "sys-" + concept
# new_uri = URIRef(sys_uri + concept + '#') # new_uri = URIRef(sys_uri + concept + '#')
# work_graph.namespace_manager.bind(new_prefix, new_uri) # work_graph.namespace_manager.bind(new_prefix, new_uri)
# print("----- " + new_prefix + ": " + new_uri) # print("----- " + new_prefix + ": " + new_uri)
# print(list(work_graph.namespace_manager.namespaces())) # print(list(work_graph.namespace_manager.namespaces()))
def load_sentences(work_graph, corpus): def load_sentences(work_graph, corpus):
target_ref = CORPUS_DIR + corpus + '/current/*.ttl' target_ref = CORPUS_DIR + corpus + '/current/*.ttl'
for file_ref in glob(target_ref): for file_ref in glob(target_ref):
if 'factoid' not in file_ref : if 'factoid' not in file_ref :
work_graph.parse(file_ref) work_graph.parse(file_ref)
def load_cts(work_graph): def load_cts(work_graph):
file_ref = CONFIG_DIR + cts_file file_ref = CONFIG_DIR + cts_file
work_graph.parse(file_ref) work_graph.parse(file_ref)
#============================================================================== #==============================================================================
# Result (export) # Result (export)
#============================================================================== #==============================================================================
def export_result(work_graph, export_ref, export_file): def export_result(work_graph, export_ref, export_file):
work_graph.serialize(destination=export_file, work_graph.serialize(destination=export_file,
base=base_uri + '/' + export_ref, base=base_uri + '/' + export_ref,
format='turtle') format='turtle')
def finalize_export_file(export_file): def finalize_export_file(export_file):
""" finalize the export file by adding some useful prefixes """ """ finalize the export file by adding some useful prefixes """
with open(export_file, "rt") as file: with open(export_file, "rt") as file:
x = file.read() x = file.read()
with open(export_file, "wt") as file: with open(export_file, "wt") as file:
x = x.replace( x = x.replace(
"@prefix sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/> .", "@prefix sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/> .",
""" """
@prefix sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/> . @prefix sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/> .
@prefix sys-class: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/class/> . @prefix sys-class: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/class/> .
@prefix sys-property: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/property/> . @prefix sys-property: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/property/> .
@prefix sys-relation: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/relation/> . @prefix sys-relation: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/relation/> .
@prefix sys-Event: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/Event#> . @prefix sys-Event: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/Event#> .
@prefix sys-event: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/eventObjectProperty#> . @prefix sys-event: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/eventObjectProperty#> .
@prefix sys-State_Property: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/State_Property#> . @prefix sys-State_Property: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/State_Property#> .
@prefix sys-stateProperty: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/statePropertyObjectProperty#> . @prefix sys-stateProperty: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/statePropertyObjectProperty#> .
@prefix sys-abstract_thing: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/abstract_thing#> . @prefix sys-abstract_thing: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/abstract_thing#> .
@prefix sys-action_verb: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/action_verb#> . @prefix sys-action_verb: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/action_verb#> .
@prefix sys-agent: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/agent#> . @prefix sys-agent: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/agent#> .
@prefix sys-attributive_verb: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/attributive_verb#> . @prefix sys-attributive_verb: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/attributive_verb#> .
@prefix sys-component: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/component#> . @prefix sys-component: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/component#> .
@prefix sys-message: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/message#> . @prefix sys-message: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/message#> .
@prefix sys-place: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/place#> . @prefix sys-place: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/place#> .
""") """)
file.write(x) file.write(x)
#============================================================================== #==============================================================================
# Main Function # Main Function
#============================================================================== #==============================================================================
def createTenetGraph(corpus): def createTenetGraph(corpus):
try: try:
work_graph = Graph() work_graph = Graph()
load_config(work_graph) load_config(work_graph)
load_frame(work_graph) load_frame(work_graph)
#define_namespace(work_graph) #define_namespace(work_graph)
load_cts(work_graph) load_cts(work_graph)
load_sentences(work_graph, corpus) load_sentences(work_graph, corpus)
output_file = extractionGraph output_file = extractionGraph
export_result(work_graph, corpus, output_file) export_result(work_graph, corpus, output_file)
finalize_export_file(output_file) finalize_export_file(output_file)
return(work_graph) return(work_graph)
except Exception as e : except Exception as e :
print("!!! An exception occurred importing rdf graphs for extraction !!!\n"+str(e)) print("!!! An exception occurred importing rdf graphs for extraction !!!\n"+str(e))
def addSentenceInTenetGraph(work_graph, sentence_ref): def addSentenceInTenetGraph(work_graph, sentence_ref):
""" TODO: add a sentence to work_graph """ """ TODO: add a sentence to work_graph """
pass pass
```

%% Cell type:code id:f66bfcd2-f2b9-4603-b1f2-d4fb643c8c3c tags:

``` python
clearExecutionInstances = """ clearExecutionInstances = """
PREFIX cts: <https://unsel.tetras-libre.fr/tenet/transduction-schemes#> PREFIX cts: <https://unsel.tetras-libre.fr/tenet/transduction-schemes#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
DELETE {?x rdf:type ?c} DELETE {?x rdf:type ?c}
WHERE { WHERE {
?c rdfs:subClassOf* cts:Transduction_Schemes . ?c rdfs:subClassOf* cts:Transduction_Schemes .
?x rdf:type ?c . ?x rdf:type ?c .
} }
""" """
addExecutionInstance = """ addExecutionInstance = """
PREFIX cts: <https://unsel.tetras-libre.fr/tenet/transduction-schemes#> PREFIX cts: <https://unsel.tetras-libre.fr/tenet/transduction-schemes#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
INSERT DATA {{<exec_instance> rdf:type {}}} INSERT DATA {{<exec_instance> rdf:type {}}}
""" """
def owl2vowl(ttlFilePath, importList=[]): def owl2vowl(ttlFilePath, importList=[]):
# Run java parser # Run java parser
if importList == []: if importList == []:
cmd = ['java', '-jar', owl2vowlPath, cmd = ['java', '-jar', owl2vowlPath,
'-file', ttlFilePath] '-file', ttlFilePath]
else: else:
cmd = ['java', '-jar', owl2vowlPath, cmd = ['java', '-jar', owl2vowlPath,
'-file', ttlFilePath, '-file', ttlFilePath,
'-dependencies'] + importList '-dependencies'] + importList
with Popen(cmd, stdout=PIPE, stderr=STDOUT) as p: with Popen(cmd, stdout=PIPE, stderr=STDOUT) as p:
p.wait() p.wait()
p.stdout.flush() p.stdout.flush()
if p.returncode != 0: if p.returncode != 0:
print("Error in owl2vowl: \n\n"+p.stdout.read().decode()) print("Error in owl2vowl: \n\n"+p.stdout.read().decode())
outFileName = ttlFilePath.split('/')[-1].replace('ttl','json') outFileName = ttlFilePath.split('/')[-1].replace('ttl','json')
os.rename(outFileName, '/opt/webvowl/'+outFileName) os.rename(outFileName, '/opt/webvowl/'+outFileName)
def applyInferStep(uuidStr, graph, step): def applyInferStep(uuidStr, graph, step):
step_ref = "cts:" + step step_ref = "cts:" + step
dest_file = workDir + uuidStr + '-' + step + ".ttl" dest_file = workDir + uuidStr + '-' + step + ".ttl"
base_ref = "http://" + uuidStr + '/' + step base_ref = "http://" + uuidStr + '/' + step
graph.update(clearExecutionInstances) graph.update(clearExecutionInstances)
graph.update(addExecutionInstance.format(step_ref)) # ex. : step = 'cts:generation' graph.update(addExecutionInstance.format(step_ref)) # ex. : step = 'cts:generation'
graph.serialize(destination=dest_file, base=base_ref, format='turtle') # serialize graph before inference graph.serialize(destination=dest_file, base=base_ref, format='turtle') # serialize graph before inference
work_file = dest_file work_file = dest_file
inferResult = shaclInfer(work_file, 'infer') # apply SHACL inference inferResult = shaclInfer(work_file, 'infer') # apply SHACL inference
graph.parse(data=inferResult) # update graph with inference graph.parse(data=inferResult) # update graph with inference
graph.serialize(destination=dest_file, base=base_ref, format='turtle') # serialize graph after inference graph.serialize(destination=dest_file, base=base_ref, format='turtle') # serialize graph after inference
return graph, inferResult return graph, inferResult
#owl2vowl('/opt/dashboards/printtatic/extraction.ttl', importList=['system-ontology.ttl'])
def createOnto(uuidStr):
    """Create the factoid ontology for requirement `uuidStr`.

    Pipeline: build the initial extraction graph, chain the SHACL inference
    steps (preprocessing, net extension, generation patch), then write the
    final inferred triples to the store as the requirement's factoid and
    export it for WebVOWL.
    """
    # -- Initialization (creation of extraction graph)
    step = 'init_graph'
    dest_file = workDir + uuidStr + '-' + step + ".ttl"
    base_ref = "http://" + uuidStr + '/' + step
    graph = createTenetGraph(uuidStr)
    graph.serialize(destination=dest_file, base=base_ref, format='turtle')
    # -- Extraction (each step serializes, infers, and merges the results)
    graph, _ = applyInferStep(uuidStr, graph, 'preprocessing')
    graph, _ = applyInferStep(uuidStr, graph, 'net_extension')
    graph, finalInferResult = applyInferStep(uuidStr, graph, 'generation_dga_patch')
    # -- Result: store the final inference output as this requirement's factoid
    factoidPath = storeBase + uuidStr + '/current/' + uuidStr + '_factoid.ttl'
    with open(factoidPath, 'w') as outfile:
        # `with` closes the file; the previous explicit close() inside the
        # block was redundant.
        outfile.write(finalInferResult)
    owl2vowl(factoidPath)  # export a WebVOWL JSON view of the factoid
``` ```
%% Cell type:code id:40b54849-9333-4819-b953-6e816ffe474c tags: %% Cell type:code id:40b54849-9333-4819-b953-6e816ffe474c tags:
``` python ``` python
####################################################################################################### #######################################################################################################
# Validation # Validation
####################################################################################################### #######################################################################################################
def pyshaclValidate():
    """Validate tmp.ttl against the SHACL shapes file and print the report.

    Prints the conformance flag, the validation graph, and the validation
    text produced by pyshacl.
    """
    from pyshacl import validate
    # Use context managers so both file handles are closed even on error
    # (the previous version leaked them via open(...).read()).
    with open('tmp.ttl') as data_f:
        data_file = data_f.read()
    with open('test-shacl-construct.shapes-order.ttl') as shapes_f:
        shapes_file = shapes_f.read()
    conforms, v_graph, v_text = validate(data_file, shacl_graph=shapes_file)
    print(conforms)
    print(v_graph)
    print(v_text)
def loadFactoids(directory):
    """Aggregate every stored factoid TTL plus the frame and UNL schema
    ontologies into a single rdflib Graph and return it."""
    merged = Graph()
    for ttl_path in glob(directory + "/*/current/*.ttl"):
        merged.parse(ttl_path)
    # Shared ontologies needed to interpret the factoids.
    merged.parse('/opt/dashboards/tools/tenet/frame/system-ontology.ttl')
    merged.parse('/opt/dashboards/tools/tenet/config/unl-rdf-schema.ttl')
    return merged
# Verification query (warning level): for the sentence whose id replaces
# ##ID##, flag entities that have several known subclasses but none of them
# mentioned in the sentence — the requirement may be under-specified.
possibleUnderspecificationQuery ='''
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
PREFIX unl: <https://unl.tetras-libre.fr/rdf/schema#>
PREFIX net: <https://unsel.tetras-libre.fr/tenet/semantic-net#>
PREFIX cprm: <https://unsel.tetras-libre.fr/tenet/config/parameters#>
PREFIX req: <https://unsel.tetras-libre.fr/tenet/frame/requirement-ontology#>
PREFIX sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/>
PREFIX fprm: <https://unsel.tetras-libre.fr/tenet/frame/parameters#>
SELECT ?thisId (CONCAT('Parle-t-on bien de tous les "', ?entityLabel, '" possibles ? (', ?subEntities, ')' ) AS ?message)
WHERE {
FILTER(?count>1)
{SELECT ?this ?thisId ?entityLabel (GROUP_CONCAT(?subEntityLabel;SEPARATOR=", ") AS ?subEntities) (COUNT(?subEntityLabel) AS ?count)
WHERE {
?subEntity rdfs:subClassOf ?entity ; rdfs:label ?subEntityLabel .
{SELECT ?this ?entity ?thisId ?entityLabel
WHERE {
BIND("##ID##" AS ?thisId)
?this a unl:UNL_Sentence ; unl:has_id ?thisId .
?entity sys:from_structure ?this ;
rdfs:subClassOf+ sys:Structure ;
rdfs:label ?entityLabel .
FILTER (
!EXISTS {?subEntity1 rdfs:subClassOf ?entity; sys:from_structure ?this}
)
}}
}
GROUP BY ?this ?thisId ?entityLabel }
}
'''
# Verification query (error level): detect classes that have exactly one
# subclass coming from a different sentence — the two classes may actually be
# equivalent. The ##ID## placeholder filters results to one requirement via
# the regex on the concatenated sentence-id list.
possibleClassEquivalenceQuery = '''PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
PREFIX unl: <https://unl.tetras-libre.fr/rdf/schema#>
PREFIX net: <https://unsel.tetras-libre.fr/tenet/semantic-net#>
PREFIX cprm: <https://unsel.tetras-libre.fr/tenet/config/parameters#>
PREFIX req: <https://unsel.tetras-libre.fr/tenet/frame/requirement-ontology#>
PREFIX sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/>
PREFIX fprm: <https://unsel.tetras-libre.fr/tenet/frame/parameters#>
SELECT (CONCAT(?messageTMP, ?sentenceList, ')') AS ?message) ?sentenceList
WHERE {
FILTER(regex(?sentenceList, "##ID##" ))
{SELECT ?messageTMP (GROUP_CONCAT(?sentenceId; SEPARATOR=", ") AS ?sentenceList)
WHERE {
SELECT DISTINCT ?messageTMP ?sentenceId
WHERE {
FILTER (?count = 1)
?subEntity rdfs:subClassOf ?this ; rdfs:label ?subEntityLabel ; sys:from_structure ?subEntitySentence .
?this rdfs:label ?thisLabel ; sys:from_structure ?thisSentence .
BIND(CONCAT('"', ?subEntityLabel, '" est la seule sous classe de "', ?thisLabel, '". Ces classes sont-elles équivalentes ? <br/>(voir les exigences ') AS ?messageTMP)
{
?thisSentence unl:has_id ?thisSentenceId .
BIND (?thisSentenceId AS ?sentenceId)}
UNION
{?subEntitySentence unl:has_id ?subEntitySentenceId .
BIND (?subEntitySentenceId AS ?sentenceId)}
FILTER(NOT EXISTS {?subEntity sys:from_structure ?thisSentence})
{SELECT ?this (COUNT(?subClass) AS ?count)
WHERE {
?this rdfs:subClassOf+ sys:Structure .
?subClass rdfs:subClassOf ?this
} GROUP BY ?this }
} ORDER BY ?sentenceId
} GROUP BY ?messageTMP }
}'''
# Verification query (info level): within the sentence whose id replaces
# ##ID##, find a property whose domain (or range) class carries a feature
# that the other end of the property lacks — suggesting the unfeatured class
# could be further specified by that feature.
unfeaturedDomainOrRangeQuery = '''PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
PREFIX unl: <https://unl.tetras-libre.fr/rdf/schema#>
PREFIX net: <https://unsel.tetras-libre.fr/tenet/semantic-net#>
PREFIX cprm: <https://unsel.tetras-libre.fr/tenet/config/parameters#>
PREFIX req: <https://unsel.tetras-libre.fr/tenet/frame/requirement-ontology#>
PREFIX sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/>
PREFIX fprm: <https://unsel.tetras-libre.fr/tenet/frame/parameters#>
SELECT ?sentenceId (CONCAT(?messageTMP, GROUP_CONCAT(?featureLabel ; SEPARATOR=', ')) AS ?message)
WHERE {
SELECT DISTINCT ?sentenceId ?featureLabel (CONCAT( 'Dans cette exigence, "', ?unfeaturedLabel, '" pourrait être précisé par : ') AS ?messageTMP)
WHERE {
{
?p rdfs:subPropertyOf+ sys:Property ;
rdfs:domain ?featured ;
rdfs:range ?unfeatured .
}
UNION
{
?p rdfs:subPropertyOf+ sys:Property ;
rdfs:domain ?unfeatured ;
rdfs:range ?featured .
}
#?p rdfs:label ?pLabel .
?featured sys:has_feature ?feature .
FILTER(NOT EXISTS {
?unfeatured sys:has_feature ?feature
})
?featuredInstance a ?featured ; sys:from_structure ?sentence.
?unfeaturedInstance a ?unfeatured ; sys:from_structure ?sentence.
BIND("##ID##" AS ?sentenceId)
?sentence unl:has_id ?sentenceId .
FILTER(NOT EXISTS {
?featuredInstance a ?featured2 .
?featured2 rdfs:subClassOf ?featured .
})
FILTER(NOT EXISTS {
?unfeaturedInstance a ?unfeatured2 .
?unfeatured2 rdfs:subClassOf ?unfeatured .
})
?featured rdfs:label ?featuredLabel .
?unfeatured rdfs:label ?unfeaturedLabel .
?feature rdfs:label ?featureLabel .
}
} GROUP BY ?sentenceId ?messageTMP
'''
# Verification query (info level, with references): like
# unfeaturedDomainOrRangeQuery, but also lists example sentences where a
# sibling subclass of the unfeatured class DOES carry a feature, to serve as
# reference requirements for the suggested specification.
unfeaturedDomainOrRangeWithRefQuery ='''PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
PREFIX unl: <https://unl.tetras-libre.fr/rdf/schema#>
PREFIX net: <https://unsel.tetras-libre.fr/tenet/semantic-net#>
PREFIX cprm: <https://unsel.tetras-libre.fr/tenet/config/parameters#>
PREFIX req: <https://unsel.tetras-libre.fr/tenet/frame/requirement-ontology#>
PREFIX sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/>
PREFIX fprm: <https://unsel.tetras-libre.fr/tenet/frame/parameters#>
SELECT ?sentenceId (CONCAT('"', ?unfeaturedLabel, '" pourrait être précisé par un ou plusieurs attributs parmis : ', ?featureList, '. <br/>(exemples de référence : ', GROUP_CONCAT(?sentence2id ; SEPARATOR=', '), ').') AS ?message)
WHERE {
{SELECT DISTINCT ?sentenceId ?unfeaturedLabel ?sentence2id (GROUP_CONCAT(?featureLabel ; SEPARATOR=', ') AS ?featureList) #
WHERE {
SELECT DISTINCT ?sentenceId ?sentence2id ?unfeaturedLabel ?featureLabel ?otherwiseFeaturedLabel ?featured2label
WHERE {
{
?p rdfs:subPropertyOf+ sys:Property ;
rdfs:domain ?featured ;
rdfs:range ?unfeatured .
}
UNION
{
?p rdfs:subPropertyOf+ sys:Property ;
rdfs:domain ?unfeatured ;
rdfs:range ?featured .
}
#?p rdfs:label ?pLabel .
?featured sys:has_feature ?feature .
FILTER(NOT EXISTS {
?unfeatured sys:has_feature ?feature
})
?featuredInstance a ?featured ; sys:from_structure ?sentence.
?unfeaturedInstance a ?unfeatured ; sys:from_structure ?sentence.
BIND("##ID##" AS ?sentenceId)
?sentence unl:has_id ?sentenceId .
?otherwiseFeatured rdfs:subClassOf ?unfeatured ; sys:has_feature ?feature2 ; rdfs:label ?otherwiseFeaturedLabel.
?otherwiseFeaturedInstance a ?otherwiseFeatured ; sys:from_structure ?sentence2.
?sentence2 unl:has_id ?sentence2id .
{?otherwiseFeaturedInstance ?p2 ?featuredInstance2} UNION { ?featuredInstance2 ?p2 ?otherwiseFeaturedInstance}
?featuredInstance2 a ?featured2 .
?featured2 sys:has_feature ?feature2 ; rdfs:label ?featured2label.
FILTER(NOT EXISTS {
?featuredInstance a ?featured2 .
?featured2 rdfs:subClassOf ?featured .
})
FILTER(NOT EXISTS {
?unfeaturedInstance a ?unfeatured2 .
?unfeatured2 rdfs:subClassOf ?unfeatured .
})
?featured rdfs:label ?featuredLabel .
?unfeatured rdfs:label ?unfeaturedLabel .
?feature rdfs:label ?featureLabel .
}
} GROUP BY ?sentenceId ?unfeaturedLabel ?sentence2id}
} GROUP BY ?sentenceId ?unfeaturedLabel ?featureList
'''
# Map each message severity to the list of verification queries producing it.
queryTypeDic = {'error':[possibleClassEquivalenceQuery],
                'warning':[possibleUnderspecificationQuery],
                'info':[unfeaturedDomainOrRangeWithRefQuery]}
# Load all stored factoids so new sentences can be verified against the
# whole corpus, then push them to the SPARQL endpoint as one INSERT DATA.
g = loadFactoids(storeBase)
sparqlUpdate.setQuery('INSERT DATA {'+g.serialize(format='nt')+'}')
sparqlLog = sparqlUpdate.query()
g.serialize(destination='/opt/dashboards/store/extraction2.ttl', format='turtle')
owl2vowl('/opt/dashboards/store/extraction2.ttl')
#factoidList = [x.replace('..','/opt/dashboards') for x in glob(storeBase+"/*/current/*factoid.ttl")]
#owl2vowl(FRAME_DIR + frame_file, importList=factoidList)
def createVerificationMessages(directory):
    """Run the verification queries for one requirement and write the results.

    `directory` is the requirement's store directory; its basename is the
    requirement id substituted for ##ID## in each SPARQL query template.
    Writes one <reqId>_<type>Messages.txt file per severity level
    ('error', 'warning', 'info') under <directory>/current/.
    """
    reqId = directory.split('/')[-1]
    for messageType in ['error', 'warning', 'info']:
        messagesStr = ''
        for queryTMP in queryTypeDic[messageType]:
            sparqlQuery.setQuery(queryTMP.replace("##ID##", reqId))
            results = sparqlQuery.query().convert()
            for result in results["results"]["bindings"]:
                message = result["message"]["value"]
                # Substring check keeps duplicate messages out of the file.
                if message not in messagesStr:
                    messagesStr += message + "\n"
        # Context manager closes the file even if the write fails (the
        # previous open/close pair was not exception-safe).
        with open(directory + '/current/' + reqId + '_' + messageType + 'Messages.txt', 'w') as outFile:
            outFile.write(messagesStr)
def createOnto(uuidStr):
    """Create the factoid ontology for requirement `uuidStr` and verify it.

    Pipeline: build the initial extraction graph, chain the SHACL inference
    steps, store the resulting factoid, export it for WebVOWL, push it to
    the SPARQL endpoint, then regenerate this requirement's verification
    messages.
    """
    # -- Initialization (creation of extraction graph)
    step = 'init_graph'
    dest_file = workDir + uuidStr + '-' + step + ".ttl"
    base_ref = "http://" + uuidStr + '/' + step
    graph = createTenetGraph(uuidStr)
    graph.serialize(destination=dest_file, base=base_ref, format='turtle')
    # -- Extraction
    graph, _ = applyInferStep(uuidStr, graph, 'preprocessing')
    graph, _ = applyInferStep(uuidStr, graph, 'net_extension')
    graph, finalInferResult = applyInferStep(uuidStr, graph, 'generation_dga_patch')
    # -- Write result
    factoidPath = storeBase + uuidStr + '/current/' + uuidStr + '_factoid.ttl'
    with open(factoidPath, 'w') as outfile:
        # `with` closes the file; the previous explicit close() was redundant.
        outfile.write(finalInferResult)
    # -- Webvowl export
    owl2vowl(factoidPath)
    # -- Verification: push the new factoid to the SPARQL store, then
    #    regenerate the requirement's verification message files.
    factoidGraph = Graph()
    factoidGraph.parse(factoidPath)
    sparqlUpdate.setQuery('INSERT DATA {' + factoidGraph.serialize(format='nt') + '}')
    sparqlLog = sparqlUpdate.query()
    createVerificationMessages(storeBase + uuidStr)
``` ```
%% Cell type:code id:744abdb9-b3d6-4025-abc9-2f749644c3ed tags: %% Cell type:code id:744abdb9-b3d6-4025-abc9-2f749644c3ed tags:
``` python ``` python
# Fonctions pour la mise à jour globale du corpus (ne pas exécuter en mode "Tableau de bord") # Fonctions pour la mise à jour globale du corpus (ne pas exécuter en mode "Tableau de bord")
def updateAllFactoids():
    """Rebuild the factoid ontology of every requirement in the corpus,
    skipping the placeholder directory '0_NONE'."""
    for directory in glob('/opt/dashboards/store/CCTP-SRSA-IP-20210831/*'):
        reqId = directory.split('/')[-1]
        if reqId != '0_NONE':
            print(directory)
            createOnto(reqId)
#updateAllFactoids() #updateAllFactoids()
def updateAllVerificationMessages():
    """Regenerate the verification message files of every requirement in the
    corpus, skipping the placeholder directory '0_NONE'."""
    for directory in glob('/opt/dashboards/store/CCTP-SRSA-IP-20210831/*'):
        if directory.split('/')[-1] != '0_NONE':
            print(directory)
            createVerificationMessages(directory)
#updateAllVerificationMessages() #updateAllVerificationMessages()
#query = possibleUnderspecificationQuery.replace("##ID##","SRSA-IP_STB_PHON_00500") #query = possibleUnderspecificationQuery.replace("##ID##","SRSA-IP_STB_PHON_00500")
#for r in g.query(query): #for r in g.query(query):
#print(r['message']) #print(r['message'])
``` ```
%% Cell type:code id:ca43f1f2-42ef-4355-a2e2-e27351a51b96 tags: %% Cell type:code id:ca43f1f2-42ef-4355-a2e2-e27351a51b96 tags:
``` python ``` python
####################################################################################################### #######################################################################################################
# Navigateur / éditeur de corpus UNL # Navigateur / éditeur de corpus UNL
####################################################################################################### #######################################################################################################
saveButtonClicks = 0 saveButtonClicks = 0
def main_pane(directory): def main_pane(directory):
saveButtonClicks = 0 saveButtonClicks = 0
saveButton = pn.widgets.Button(name='Enregistrer', button_type='success', width = 100) saveButton = pn.widgets.Button(name='Enregistrer', button_type='success', width = 100)
saveButtonDic = dict(button=saveButton) saveButtonDic = dict(button=saveButton)
saveCommentButton = pn.widgets.Button(name='Enregistrer', button_type='success', width = 100) saveCommentButton = pn.widgets.Button(name='Enregistrer', button_type='success', width = 100)
path = storeBase+directory+'/current/' path = storeBase+directory+'/current/'
pathOrig = storeBase+directory+'/orig/' pathOrig = storeBase+directory+'/orig/'
svgPath = path+directory+'.svg' svgPath = path+directory+'.svg'
pngPath = path+directory+'.png' pngPath = path+directory+'.png'
unlPath = path+directory+'.unl' unlPath = path+directory+'.unl'
rdfPath = path+directory+'.ttl' rdfPath = path+directory+'.ttl'
commentPath = path+directory+'.comments' commentPath = path+directory+'.comments'
with open(commentPath) as commentFile: with open(commentPath) as commentFile:
commentStr = commentFile.read() commentStr = commentFile.read()
commentFile.close() commentFile.close()
with open(unlPath) as unlFile: with open(unlPath) as unlFile:
unlStr = unlFile.read() unlStr = unlFile.read()
unlFile.close() unlFile.close()
svgPathOrig = pathOrig+directory+'.svg' svgPathOrig = pathOrig+directory+'.svg'
pngPathOrig = pathOrig+directory+'.png' pngPathOrig = pathOrig+directory+'.png'
unlPathOrig = pathOrig+directory+'.unl' unlPathOrig = pathOrig+directory+'.unl'
rdfPathOrig = pathOrig+directory+'.ttl' rdfPathOrig = pathOrig+directory+'.ttl'
with open(unlPathOrig) as unlFileOrig: with open(unlPathOrig) as unlFileOrig:
unlStrOrig = unlFileOrig.read() unlStrOrig = unlFileOrig.read()
unlFileOrig.close() unlFileOrig.close()
unlHtmlOrig = unlStrOrig.replace("\n","<br/>") unlHtmlOrig = unlStrOrig.replace("\n","<br/>")
if unlStrOrig == unlStr: if unlStrOrig == unlStr:
modIndicator = '' modIndicator = ''
else: else:
modIndicator = ' <u>modifié</u>' modIndicator = ' <u>modifié</u>'
regexFr = re.compile("{org:fr}\n(.*?)\n{/org}",re.MULTILINE|re.DOTALL) regexFr = re.compile("{org:fr}\n(.*?)\n{/org}",re.MULTILINE|re.DOTALL)
try: try:
frStr = regexFr.search(unlStr).group(1) frStr = regexFr.search(unlStr).group(1)
except AttributeError: except AttributeError:
frStr = '' frStr = ''
regexEn = re.compile("{en}\n(.*?)\n{/en}",re.MULTILINE|re.DOTALL) regexEn = re.compile("{en}\n(.*?)\n{/en}",re.MULTILINE|re.DOTALL)
try: try:
enStr = regexEn.search(unlStr).group(1) enStr = regexEn.search(unlStr).group(1)
except AttributeError: except AttributeError:
enStr = '' enStr = ''
unlOrig_html = pn.pane.HTML(unlHtmlOrig) unlOrig_html = pn.pane.HTML(unlHtmlOrig)
unl_input = pn.widgets.input.TextAreaInput(height=400) unl_input = pn.widgets.input.TextAreaInput(height=400)
unl_input.value = unlStr unl_input.value = unlStr
comment_input = pn.widgets.input.TextAreaInput(height=300) comment_input = pn.widgets.input.TextAreaInput(height=300)
comment_input.value = commentStr comment_input.value = commentStr
downloadSvg = pn.widgets.FileDownload(sizing_mode='stretch_width', file=svgPath, embed=True, name='Télécharger le graphe en SVG :') downloadSvg = pn.widgets.FileDownload(sizing_mode='stretch_width', file=svgPath, embed=True, name='Télécharger le graphe en SVG :')
downloadPng = pn.widgets.FileDownload(sizing_mode='stretch_width', file=pngPath, embed=True, name='Télécharger le graphe en PNG :') downloadPng = pn.widgets.FileDownload(sizing_mode='stretch_width', file=pngPath, embed=True, name='Télécharger le graphe en PNG :')
downloadRdf = pn.widgets.FileDownload(sizing_mode='stretch_width', file=rdfPath, embed=True, name='Télécharger le code UNL-RDF :') downloadRdf = pn.widgets.FileDownload(sizing_mode='stretch_width', file=rdfPath, embed=True, name='Télécharger le code UNL-RDF :')
downloadUnl = pn.widgets.FileDownload(sizing_mode='stretch_width', file=unlPath, embed=True, name='Télécharger le code UNL :') downloadUnl = pn.widgets.FileDownload(sizing_mode='stretch_width', file=unlPath, embed=True, name='Télécharger le code UNL :')
def compute_unl_graph_pane(button):
    """Render the requirement's UNL graph as a PNG pane.

    On every invocation except the very first (the automatic render that
    happens before the user ever clicked "save"), the edited UNL code is
    written back to the requirement's store folder first.
    """
    global saveButtonClicks
    if saveButtonClicks:  # skip the initial automatic render
        target = storeBase + selectDir.value + '/current/' + selectDir.value
        writeUnlFiles(unl_input.value, target)
    saveButtonClicks += 1
    return pn.pane.PNG(pngPath, width=pane_width)
unl_graph_pane = pn.interact(lambda button: compute_unl_graph_pane(button), **saveButtonDic)
warnColumn = pn.Card(width=pane_width, header="**Alertes pour l'exigence :**")
warnings = 0

def _append_alerts(suffix, css_class):
    """Read one optional '<req><suffix>' message file and append a Bootstrap
    alert of class 'alert-<css_class>' per line to warnColumn.

    Returns 1 if at least one message was found, 0 otherwise (including when
    the file does not exist: message files are optional per requirement).
    """
    path = storeBase + directory + '/current/' + directory + suffix
    try:
        # 'with' guarantees the file handle is closed.
        with open(path, 'r') as msgfile:
            messages = msgfile.readlines()
    except OSError:
        # Only I/O errors (typically a missing file) are expected here;
        # the previous bare 'except' also hid genuine bugs.
        return 0
    for message in messages:
        warnColumn.append(pn.pane.HTML('<div class="alert alert-{}">{}</div>'.format(css_class, message)))
    return 1 if messages else 0

# The three severity levels share the same logic; only file suffix and
# alert style differ.
warnings |= _append_alerts('_errorMessages.txt', 'danger')
warnings |= _append_alerts('_warningMessages.txt', 'warning')
warnings |= _append_alerts('_infoMessages.txt', 'info')
if warnings == 0:
    warnColumn.append(pn.pane.HTML('<div class="alert alert-info">Pas d\'anomalie détectée</div>'))
# Assemble the requirement view: header row, alerts, FR/EN renderings,
# the UNL graph, then collapsible cards for comments / UNL code /
# original artefacts, and finally the download buttons.
header_row = pn.Row(
    pn.pane.HTML('<a href="https://unsel.tetras-lab.io/webvowl#{}_factoid" target="_blank"><button type="button" class="btn btn-outline-secondary btn-sm">Visualiser l\'ontologie construite</button><a>'.format(directory)),
    pn.Column(pn.pane.HTML('<font size="tiny">Exigence sélectionnée : '+directory+'</font>'), sizing_mode='stretch_width'),
)
comments_card = pn.Card(
    pn.Column(saveCommentButton, comment_input, width=pane_width),
    header='**Commentaires**',
    collapsed=True, width=pane_width)
unl_code_card = pn.Card(
    pn.Column(saveButton, unl_input, width=pane_width),
    header='**Code UNL**'+modIndicator,
    collapsed=True, width=pane_width)
original_card = pn.Card(
    pn.Column(
        unlOrig_html,
        pn.Card(pn.pane.PNG(pngPath, width=pane_width-20), header="**Graphe d'origine**", width=pane_width-10)
    ),
    header="**Code UNL d'origine**",
    collapsed=True, width=pane_width)
downloads_box = pn.WidgetBox(
    pn.Row(downloadPng, downloadSvg),
    pn.Row(downloadUnl, downloadRdf),
    width=pane_width,
)
pane = pn.Column(
    header_row,
    warnColumn,
    pn.pane.HTML('FR : '+frStr),
    pn.pane.HTML('EN : '+enStr),
    unl_graph_pane[1],
    comments_card,
    unl_code_card,
    original_card,
    downloads_box,
    width=pane_width,
)
def saveComments(event): def saveComments(event):
with open(commentPath, 'w') as commentFile: with open(commentPath, 'w') as commentFile:
commentFile.write(comment_input.value) commentFile.write(comment_input.value)
commentFile.close() commentFile.close()
saveCommentButton.on_click(saveComments) saveCommentButton.on_click(saveComments)
return(pane) return(pane)
``` ```
%% Cell type:code id:5d4ec56e-d0bb-44c8-975b-49d409b6b160 tags: %% Cell type:code id:5d4ec56e-d0bb-44c8-975b-49d409b6b160 tags:
``` python ``` python
####################################################################################################### #######################################################################################################
# Analyse avec UNseL-inter
#######################################################################################################
def extractOnClick(event):
    """Button callback: create a fresh COMP-<uuid> requirement from the
    sentence currently entered in UNseL-inter, select it in the
    navigator, then build its ontology."""
    reqId = "COMP" + str(uuid.uuid4())
    createFolderFromUnselInter(reqId)
    selectDir.options[reqId] = reqId
    selectDir.value = reqId
    createOnto(reqId)
buttonExtract = pn.widgets.Button(name="Construire l'ontologie de la phrase (factoïd)", width=300)
buttonExtract.param.watch(extractOnClick, 'clicks')
#######################################################################################################
# Navigation Interface
#######################################################################################################
# List every requirement folder in the store and build the selector options.
pathList = glob(storeBase + '*')
dirList = sorted(p.split('/')[-1] for p in pathList)
# Requirements carrying alerts could be highlighted with a '**' prefix;
# the highlight list is currently disabled (kept empty).
warningList = []
dirDic = {}
for directory in dirList:
    label = '**' + directory if directory in warningList else directory
    dirDic[label] = directory
dirDic = dict(sorted(dirDic.items()))
selectDir = pn.widgets.Select(name='Sélectionnez une exigence : ', options=dirDic, width=350)
selectDir.value = '0_NONE'
dir_selector = dict(directory=selectDir)
#######################################################################################################
# Analyse avec UNseL-inter
#######################################################################################################
def extractOnClick(event):
    """Button callback: analyse the sentence currently held by UNseL-inter.

    The COMP-<uuid> folder is created and its ontology (with verification
    messages) is built BEFORE the selector is pointed at it, so the
    requirement pane already finds the results when it re-renders.
    """
    reqId = "COMP" + str(uuid.uuid4())
    createFolderFromUnselInter(reqId)
    createOnto(reqId)
    selectDir.options[reqId] = reqId
    selectDir.value = reqId
##################################################
buttonExtract = pn.widgets.Button(name="Construire l'ontologie de la phrase (factoïd)", width=300)
buttonExtract.param.watch(extractOnClick, 'clicks')
unlNavigatorPane = pn.interact(lambda directory: main_pane(directory), **dir_selector)

# --- Main interface: three collapsible cards ---------------------------------
# 1) Analyse a new sentence through the embedded UNseL-inter page.
analyse_card = pn.Card(
    pn.Row(
        pn.Spacer(sizing_mode='stretch_width'),
        pn.Column(
            pn.pane.HTML('''<iframe id="inlineFrameExample"
    title="Inline Frame Example"
    width="1000"
    height="600"
    src="https://lingwarium.org/UNseL-inter/">
</iframe>'''),
            buttonExtract),
        pn.Spacer(sizing_mode='stretch_width'),
    ),
    title="Analyser une nouvelle exigence", width=pane_width+50, collapsed=True,
)

# 2) Browse the already analysed requirements.
browse_card = pn.Card(
    pn.Row(
        pn.layout.HSpacer(),
        pn.Column(
            selectDir,
            unlNavigatorPane[1],
        ),
        pn.layout.HSpacer(),
    ),
    title="Naviguer dans les exigences", width=pane_width+50, collapsed=True
)

# 3) Corpus-wide extraction results (WebVOWL view + TTL download).
corpus_card = pn.Card(
    pn.pane.HTML('''<iframe id="inlineFrameExample"
    title="Inline Frame Example"
    width="{}"
    height="800"
    src="https://unsel.tetras-lab.io/webvowl/#extraction">
</iframe>'''.format(pane_width)),
    pn.Row(
        pn.widgets.FileDownload(file='/opt/dashboards/static/extraction.ttl', embed=True)
    ),
    title="Résultats sur le corpus", width=pane_width+50, collapsed=True
)

# Bare expression: this is the notebook cell's displayed output.
pn.Column(
    analyse_card,
    pn.layout.Divider(),
    browse_card,
    pn.layout.Divider(),
    corpus_card,
)
``` ```
%% Cell type:code id:e06f5381-6c2d-4762-bcb9-a914fb5889e3 tags: %% Cell type:code id:e06f5381-6c2d-4762-bcb9-a914fb5889e3 tags:
``` python ``` python
#list = glob('/opt/dashboards/store/CCTP-SRSA-IP-20210831/*') #list = glob('/opt/dashboards/store/CCTP-SRSA-IP-20210831/*')
#for d in list : #for d in list :
# print(d) # print(d)
# uuidStr = d.replace('/opt/dashboards/store/CCTP-SRSA-IP-20210831/','') # uuidStr = d.replace('/opt/dashboards/store/CCTP-SRSA-IP-20210831/','')
# createOnto(uuidStr) # createOnto(uuidStr)
``` ```
%% Cell type:code id:47f67a4b-4d6c-4192-93b1-e697fd2c0e73 tags:
``` python
```
%% Cell type:code id:18d75469-934f-4021-9126-ca0c1dcd4d98 tags:
``` python
```
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment