diff --git a/tetras-mars-web-app-advanced.ipynb b/tetras-mars-web-app-advanced.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..f473c79f66911cdd145dc3b2b8a3f76a96a277e2
--- /dev/null
+++ b/tetras-mars-web-app-advanced.ipynb
@@ -0,0 +1,1105 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "160ab8ab-091e-4b00-b63a-c3746c71c540",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import panel as pn\n",
+    "import param\n",
+    "from glob import glob\n",
+    "import base64\n",
+    "import re\n",
+    "import requests\n",
+    "from subprocess import Popen, PIPE, STDOUT\n",
+    "import html\n",
+    "from cairosvg import svg2png\n",
+    "import graphviz\n",
+    "from rdflib import Graph, Namespace, URIRef\n",
+    "import os\n",
+    "from collections import OrderedDict\n",
+    "from urllib.request import urlopen\n",
+    "import uuid\n",
+    "import shutil\n",
+    "from SPARQLWrapper import SPARQLWrapper, JSON\n",
+    "\n",
+    "pn.extension(comms='ipywidgets')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7b4685d7-698d-4a86-a0a4-a81d337bc9d7",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#######################################################################################################\n",
+    "# Parameters\n",
+    "#######################################################################################################\n",
+    "shaclBase = '/opt/dashboards/tools/shacl-1.3.2/bin/'\n",
+    "owl2vowlPath = '/opt/dashboards/tools/owl2vowl_0.3.7/owl2vowl.jar'\n",
+    "storeBase = '../store/CCTP-SRSA-IP-20210831/'\n",
+    "extractionGraph = '/opt/dashboards/tetras-lab-unl-demos/work_graph.ttl' # -- old --- extractionGraph = '/opt/dashboards/tetras-lab-unl-demos/extraction-data-9.ttl'\n",
+    "workDir = 'work-data/'\n",
+    "webvowlData = '/opt/webvowl'\n",
+    "pane_width = 1250\n",
+    "\n",
+    "# Fuseki\n",
+    "fusekiBase = \"https://fuseki.unsel.tetras-lab.io/unsel/\"\n",
+    "sparqlQuery = SPARQLWrapper(fusekiBase+'query',returnFormat=JSON)\n",
+    "sparqlUpdate = SPARQLWrapper(fusekiBase+'update')\n",
+    "sparqlUpdate.method = 'POST'\n",
+    "sparqlUpdate.setCredentials(\"admin\", \"wezW3EHTH4LfEdaKtnC9errLH1YwVXssIO6DUfnjGAHuBApSfvDb4R1uDX5JmSVK\")  # SECURITY: hard-coded admin credentials committed in clear text -- load from an environment variable or secrets store instead, and rotate this password\n",
+    "sparqlUpdate.setQuery(\"DELETE {?s ?p ?o} WHERE {?s ?p ?o}\")  # WARNING: deletes every triple in the dataset's default graph as soon as this cell runs\n",
+    "sparqlLog = sparqlUpdate.query()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "2c41c319-4beb-4a85-a232-61a12d00cdbf",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#######################################################################################################\n",
+    "# UNL tools functions\n",
+    "#######################################################################################################\n",
+    "\n",
+    "def unl2stuff(unlFilePath, jarPath, outPrefix, outType):\n",
+    "    # Run java parser\n",
+    "    cmd = ['java', '-jar', jarPath,\n",
+    "           '--input-file', unlFilePath,\n",
+    "           '--output-file', outPrefix,\n",
+    "           '--output-type', outType]\n",
+    "    with Popen(cmd, stdout=PIPE, stderr=STDOUT) as p:\n",
+    "        p.wait()\n",
+    "        p.stdout.flush()\n",
+    "        if p.returncode != 0:\n",
+    "            print(\"Error in unl2rdf: \\n\\n\"+p.stdout.read().decode())\n",
+    "            print('UNL;')\n",
+    "            #print(text)\n",
+    "            \n",
+    "def unl2dotWeb(unldata) :\n",
+    "    data={'unl': unldata, 'outputs':['dot', 'svg', 'rdf']}\n",
+    "    try:\n",
+    "        r = requests.post('https://unl.demo.tetras-libre.fr/unl2rdf', data=data)\n",
+    "    except Exception as e:\n",
+    "        return 'Error calling https://unl.demo.tetras-libre.fr/unl2rdf : \"{error}\"'.format(error=e)\n",
+    "    pageHtml = r.text  # renamed from 'html' to avoid shadowing the stdlib 'html' module imported above\n",
+    "    # Use a regex instead of parsing the HTML, because the returned markup is malformed\n",
+    "    regexSvg = re.compile('<svg.*svg>',re.MULTILINE|re.DOTALL)\n",
+    "    regexRdf = re.compile(\"<code id='rdf' class='collapse show'>(.*?)</code>\",re.MULTILINE|re.DOTALL)\n",
+    "    try : \n",
+    "        svg = regexSvg.search(pageHtml).group()\n",
+    "        rdf = regexRdf.search(pageHtml).group(1)        \n",
+    "    except Exception as e :\n",
+    "        svg = ''\n",
+    "        rdf = ''\n",
+    "        print(e)\n",
+    "    return(svg, rdf)\n",
+    "\n",
+    "def zipdir(path, ziph):\n",
+    "    # ziph is zipfile handle\n",
+    "    for root, dirs, files in os.walk(path):\n",
+    "        for file in files:\n",
+    "            if not('orig' in root):\n",
+    "                ziph.write(os.path.join(root, file), \n",
+    "                    os.path.relpath(os.path.join(root, file), \n",
+    "                    os.path.join(path, '..')))\n",
+    "\n",
+    "def addBaseUri(rdfStr):\n",
+    "    regexBaseUri = re.compile(\"http://rdf-unl.org.*?sentence.*?ontology\")\n",
+    "    baseUri = regexBaseUri.search(rdfStr).group()\n",
+    "    rdfStr = \"# baseURI: \"+baseUri+\"\\n\"+rdfStr\n",
+    "    return(rdfStr)\n",
+    "\n",
+    "def postEditRdf(rdfPath, frStr, enStr):\n",
+    "    textID = rdfPath.rsplit('/', 1)[0]\n",
+    "    newPrefix = \"http://unsel.rdf-unl.org/\"+textID\n",
+    "    with open(rdfPath,'r') as rdfFile :\n",
+    "        rdfStr = rdfFile.read()\n",
+    "        rdfFile.close()\n",
+    "    regexBaseUri = re.compile(\"http://rdf-unl.org.*?sentence.*?ontology\")\n",
+    "    rdfStr = rdfStr.replace('rdfs:label \"TBD : phrase en langue naturelle\"@inv ;', \n",
+    "                            '<https://unl.tetras-libre.fr/rdf/schema#has_id> \"{}\" ;\\n'.format(textID.split('/')[-2])+'rdfs:label \"\"\"{}\"\"\"@fr ;\\n'.format(frStr)+'    rdfs:label \"\"\"{}\"\"\"@en ;\\n'.format(enStr))\n",
+    "    baseUri = regexBaseUri.search(rdfStr).group()\n",
+    "    oldPrefix = baseUri.rsplit('/', 1)[0]\n",
+    "    rdfStr = rdfStr.replace(oldPrefix+'#ontology', newPrefix.rsplit('/', 1)[0]+'#ontology')    \n",
+    "    rdfStr = rdfStr.replace(oldPrefix+'#', \"http://unsel.rdf-unl.org/uw_lexeme#\")\n",
+    "    rdfStr = \"# baseURI: \"+baseUri+\"\\n @prefix :     <\"+baseUri.replace(\"ontology\",\"\")+\"> .\\n\"+rdfStr\n",
+    "    rdfStr = rdfStr.replace(oldPrefix, newPrefix)\n",
+    "    with open(rdfPath,'w') as rdfFile :\n",
+    "        rdfStr = rdfFile.write(rdfStr)\n",
+    "        rdfFile.close()\n",
+    "\n",
+    "def replaceInplace(filePath, searchText, replaceText):\n",
+    "    #read input file\n",
+    "    fin = open(filePath, \"rt\")\n",
+    "    #read file contents to string\n",
+    "    data = fin.read()\n",
+    "    #replace all occurrences of the required string\n",
+    "    data = data.replace(searchText, replaceText)\n",
+    "    #close the input file\n",
+    "    fin.close()\n",
+    "    #open the input file in write mode\n",
+    "    fin = open(filePath, \"wt\")\n",
+    "    #overrite the input file with the resulting data\n",
+    "    fin.write(data)\n",
+    "    #close the file\n",
+    "    fin.close()\n",
+    "    \n",
+    "def createStoreDirectory(unlStr, srsaRef):\n",
+    "    storeDir = storeBase+srsaRef+\"/current/\"\n",
+    "    regexFr = re.compile(\"{org:fr}\\n(.*?)\\n{/org}\",re.MULTILINE|re.DOTALL)\n",
+    "    try:\n",
+    "        frStr = regexFr.search(unlStr).group(1)\n",
+    "    except AttributeError:\n",
+    "        frStr = ''    \n",
+    "    enStr = ''\n",
+    "    # Create a directory named after 'Référence'\n",
+    "    try :\n",
+    "        os.makedirs(storeDir)\n",
+    "    except FileExistsError:\n",
+    "        pass\n",
+    "    # Add english translation to unl code\n",
+    "    unlStr = unlStr.replace(\"{/org}\", \"{{/org}}\\n{{en}}\\n{enStr}\\n{{/en}}\".format(enStr=enStr))  \n",
+    "    # Write UNL code to a file\n",
+    "    with open(storeDir+srsaRef+'.unl','w') as unlFile:\n",
+    "        unlFile.write(unlStr)\n",
+    "    os.chmod(storeDir+srsaRef+'.unl',0o766)\n",
+    "    # Send UNL code to https://unl.demo.tetras-libre.fr/unl2rdf to get SVG and RDF\n",
+    "    #svg, rdf = unl2dotWeb(unlStr)\n",
+    "    \n",
+    "    # Use unltools jar to create ttl and dot file from unl    \n",
+    "    unl2stuff(storeDir+srsaRef+'.unl', '/opt/dashboards/tools/unl2rdf-app-0.9.jar', storeDir+srsaRef, 'rdf')\n",
+    "    postEditRdf(storeDir+srsaRef+'.ttl', frStr, enStr)\n",
+    "    unl2stuff(storeDir+srsaRef+'.unl', '/opt/dashboards/tools/unl2rdf-app-0.9.jar', storeDir+srsaRef, 'dot')\n",
+    "    \n",
+    "    # Generate svg and png\n",
+    "    graphviz.render('dot', 'svg', storeDir+srsaRef+'.dot') \n",
+    "    graphviz.render('dot', 'png', storeDir+srsaRef+'.dot')\n",
+    "    # Rename generated svg and png so they are not named like file.dot.svg\n",
+    "    svgList = glob(storeDir+srsaRef+\"*.svg\")\n",
+    "    for svgPath in svgList:\n",
+    "        svgNewPath = svgPath.replace(\".dot\",\"\")\n",
+    "        os.rename(svgPath, svgNewPath)\n",
+    "    pngList = glob(storeDir+srsaRef+\"*.png\")\n",
+    "    for pngPath in pngList:\n",
+    "        pngNewPath = pngPath.replace(\".dot\",\"\")\n",
+    "        os.rename(pngPath, pngNewPath)\n",
+    "    \n",
+    "    # Add full text sentences to the svg\n",
+    "    replaceInplace(storeDir+srsaRef+'.svg', '</svg>','''<text x=\"0\" y=\"-40\">fr : {fr}</text>\n",
+    "<text x=\"0\" y=\"-20\">en : {en}</text>\n",
+    "</svg>'''.format(fr=frStr,  en=enStr))\n",
+    "    \n",
+    "    #svgWithTxt = svg.replace('</svg>','''<text x=\"0\" y=\"-40\">fr : {fr}</text>\n",
+    "#<text x=\"0\" y=\"-20\">en : {en}</text>\n",
+    "#</svg>'''.format(fr=frStr,  en=enStr))  \n",
+    "    \n",
+    "    #with open(storeDir+srsaRef+'.svg','w') as svgFile:\n",
+    "    #    svgFile.write(svgWithTxt)\n",
+    "    #os.chmod(storeDir+srsaRef+'.svg',0o766)\n",
+    "    #with open(storeDir+srsaRef+'.ttl','w') as rdfFile:\n",
+    "    #    rdfFile.write(rdf)\n",
+    "    os.chmod(storeDir+srsaRef+'.ttl',0o766)\n",
+    "    os.chmod(storeDir+srsaRef+'.svg',0o766)\n",
+    "    os.chmod(storeDir+srsaRef+'.png',0o766)\n",
+    "    os.chmod(storeDir+srsaRef+'.dot',0o766)\n",
+    "    os.chmod(storeDir+srsaRef+'.unl',0o766)\n",
+    "\n",
+    "    # Convert svg to png and write to a file\n",
+    "    #try:\n",
+    "    #    svg2png(bytestring=svgWithTxt, write_to=storeDir+srsaRef+'.png')\n",
+    "    #except :\n",
+    "    #    pass\n",
+    "    shutil.copytree(storeDir, storeBase+srsaRef+\"/orig/\") \n",
+    "    with open(storeBase+srsaRef+\"/current/\"+srsaRef+'.comments','w') as commentFile:\n",
+    "        commentFile.write(\"[David] : Saisissez vos commentaires en commençant par votre nom, n'oubliez pas d'enregistrer : \")\n",
+    "    os.chmod(storeBase+srsaRef+\"/current/\"+srsaRef+'.comments',0o766)\n",
+    "\n",
+    "def writeUnlFiles(unlStr, storePrefix):\n",
+    "    srsaRef = selectDir.value\n",
+    "    with open(storePrefix+'.unl','w') as unlFile:\n",
+    "        unlFile.write(unlStr)\n",
+    "        unlFile.close()   \n",
+    "        \n",
+    "def createFolderFromUnselInter(srsaRef):\n",
+    "    url = 'https://lingwarium.org/UNseL-inter/GrapheUNL.txt'\n",
+    "    unlStr = urlopen(url).read().decode('utf-8').replace('[P:1]','').replace('[/P]','')\n",
+    "    createStoreDirectory(unlStr, srsaRef)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7b32d69a-52fb-4b9d-8cd9-5fb45c177284",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#######################################################################################################\n",
+    "# Extraction\n",
+    "#######################################################################################################\n",
+    "\n",
+    "def run_command(cmd):\n",
+    "    with Popen(cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True) as p:\n",
+    "        p.poll()\n",
+    "        p.stdout.flush()\n",
+    "        p.stderr.flush()\n",
+    "        stdout, stderr = p.communicate()\n",
+    "    return p.returncode, stdout, stderr\n",
+    "\n",
+    "# Run SHACL infer sh script. mode argument can take the values 'infer' or 'validate'\n",
+    "def shaclInfer(ttlPath, mode, ttlRulesPath = ''):\n",
+    "    if ttlRulesPath == '':\n",
+    "        cmd = ['sh', shaclBase+'/shacl'+mode+'.sh', '-datafile', ttlPath]\n",
+    "    else:\n",
+    "        cmd = ['sh', shaclBase+'/shacl'+mode+'.sh', '-datafile', ttlPath, '-shapesfile', ttlRulesPath]\n",
+    "    #cmd = ' '.join(cmd)    \n",
+    "    #!{cmd}\n",
+    "    code, out, err = run_command(cmd)\n",
+    "    if code != 0:\n",
+    "         print(\"Error in SHACL script: \\n\\n\"+err)\n",
+    "    else:\n",
+    "        return(out)\n",
+    "    \n",
+    "def export_result(g):  # NOTE(review): shadowed by the later export_result(work_graph, export_ref, export_file); also reads global base_uri defined only in a later cell -- confirm which definition is intended\n",
+    "    export_file = 'output.ttl'\n",
+    "    g.serialize(destination=export_file, base=base_uri, format='turtle')\n",
+    "    \n",
+    "#shaclInfer('/opt/dashboards/tetras-lab-unl-demos/demo-cctp-40.ttl', 'infer')\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5c7164d7-c074-4aa3-9776-0cc5cc8f18f7",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#==============================================================================\n",
+    "# TENET: prepare work data\n",
+    "#------------------------------------------------------------------------------\n",
+    "# Prepare work data for extraction processing. \n",
+    "#==============================================================================\n",
+    "\n",
+    "#==============================================================================\n",
+    "# Parameters\n",
+    "#==============================================================================\n",
+    "\n",
+    "# Working directories\n",
+    "CONFIG_DIR = \"/opt/dashboards/tools/tenet/config/\"\n",
+    "FRAME_DIR = \"/opt/dashboards/tools/tenet/frame/\"\n",
+    "CORPUS_DIR = storeBase\n",
+    "OUTPUT_DIR = \"output/\"\n",
+    "\n",
+    "# Config Definition\n",
+    "TURTLE_SUFFIX = \".ttl\"\n",
+    "frame_file = \"system-ontology.ttl\"\n",
+    "dash_file = \"dash-data-shapes.ttl\" # data from \"http://datashapes.org/dash.ttl\"\n",
+    "schema_file = \"unl-rdf-schema.ttl\"\n",
+    "semantic_net_file = \"semantic-net.ttl\"\n",
+    "cts_file = \"transduction-schemes.ttl\"\n",
+    "c_param_file = \"config-parameters.ttl\"\n",
+    "\n",
+    "# Dev Tests\n",
+    "base_uri = \"https://unsel.tetras-libre.fr/tenet/working\"\n",
+    "\n",
+    "#==============================================================================\n",
+    "# Graph Initialization\n",
+    "#==============================================================================\n",
+    "    \n",
+    "def load_config(work_graph):    \n",
+    "    file_ref = CONFIG_DIR + schema_file\n",
+    "    work_graph.parse(file_ref)\n",
+    "    \n",
+    "    file_ref = CONFIG_DIR + semantic_net_file\n",
+    "    work_graph.parse(file_ref)\n",
+    "    \n",
+    "    file_ref = CONFIG_DIR + dash_file\n",
+    "    work_graph.parse(file_ref)\n",
+    "    \n",
+    "    file_ref = CONFIG_DIR + c_param_file\n",
+    "    work_graph.parse(file_ref)\n",
+    "\n",
+    "def load_frame(work_graph):\n",
+    "    file_ref = FRAME_DIR + frame_file\n",
+    "    work_graph.parse(file_ref)\n",
+    "\n",
+    "#def define_namespace(work_graph):\n",
+    "#    print(\"-- Namespace Definition:\")\n",
+    "#    \n",
+    "#    sys_uri = \"https://unsel.tetras-libre.fr/tenet/frame/system-ontology/\"\n",
+    "#    concept_classes = [\"agent\"]\n",
+    "#    for concept in concept_classes:\n",
+    "#        new_prefix = \"sys-\" + concept\n",
+    "#        new_uri = URIRef(sys_uri + concept + '#') \n",
+    "#        work_graph.namespace_manager.bind(new_prefix, new_uri)\n",
+    "#        print(\"----- \" + new_prefix + \": \" + new_uri)\n",
+    "#    print(list(work_graph.namespace_manager.namespaces()))  \n",
+    "    \n",
+    "def load_sentences(work_graph, corpus):\n",
+    "    target_ref = CORPUS_DIR + corpus + '/current/*.ttl'\n",
+    "    for file_ref in glob(target_ref):\n",
+    "        if 'factoid' not in file_ref :\n",
+    "            # Patch for new UWs with for instance .§A in restrictions\n",
+    "            \n",
+    "            work_graph.parse(file_ref)\n",
+    "                      \n",
+    "def load_cts(work_graph):    \n",
+    "    file_ref = CONFIG_DIR + cts_file\n",
+    "    work_graph.parse(file_ref)\n",
+    "        \n",
+    "#==============================================================================\n",
+    "# Result (export)\n",
+    "#==============================================================================\n",
+    "     \n",
+    "def export_result(work_graph, export_ref, export_file):    \n",
+    "    work_graph.serialize(destination=export_file, \n",
+    "                         base=base_uri + '/' + export_ref, \n",
+    "                         format='turtle')\n",
+    "    \n",
+    "    \n",
+    "def finalize_export_file(export_file):\n",
+    "    \"\"\" finalize the export file by adding some useful prefixes \"\"\"\n",
+    "    \n",
+    "    with open(export_file, \"rt\") as file:\n",
+    "        x = file.read()\n",
+    "    \n",
+    "    with open(export_file, \"wt\") as file:\n",
+    "        x = x.replace(\n",
+    "            \"@prefix sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/> .\",\n",
+    "            \"\"\"\n",
+    "            @prefix sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/> .\n",
+    "            @prefix sys-class: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/class/> .\n",
+    "            @prefix sys-property: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/property/> .\n",
+    "            @prefix sys-relation: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/relation/> .\n",
+    "            @prefix sys-Event: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/Event#> .\n",
+    "            @prefix sys-event: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/eventObjectProperty#> .\n",
+    "            @prefix sys-State_Property: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/State_Property#> .\n",
+    "            @prefix sys-stateProperty: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/statePropertyObjectProperty#> .\n",
+    "            @prefix sys-abstract_thing: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/abstract_thing#> .\n",
+    "            @prefix sys-action_verb: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/action_verb#> .\n",
+    "            @prefix sys-agent: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/agent#> .\n",
+    "            @prefix sys-attributive_verb: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/attributive_verb#> .\n",
+    "            @prefix sys-component: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/component#> .\n",
+    "            @prefix sys-message: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/message#> .\n",
+    "            @prefix sys-place: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/place#> .\n",
+    "            \"\"\")\n",
+    "        file.write(x)\n",
+    "        \n",
+    "#==============================================================================\n",
+    "# Main Function\n",
+    "#==============================================================================\n",
+    "  \n",
+    "def createTenetGraph(corpus):\n",
+    "    try:    \n",
+    "        work_graph = Graph()\n",
+    "        load_config(work_graph)   \n",
+    "        load_frame(work_graph) \n",
+    "        #define_namespace(work_graph)\n",
+    "        load_cts(work_graph)\n",
+    "        load_sentences(work_graph, corpus)\n",
+    "        output_file = extractionGraph\n",
+    "        export_result(work_graph, corpus, output_file)\n",
+    "        finalize_export_file(output_file)\n",
+    "        return(work_graph)\n",
+    "    except Exception as e :\n",
+    "        print(\"!!! An exception occurred importing rdf graphs for extraction !!!\\n\"+str(e)) \n",
+    "        \n",
+    "def addSentenceInTenetGraph(work_graph, sentence_ref):\n",
+    "    \"\"\" TODO: add a sentence to work_graph \"\"\"\n",
+    "    pass\n",
+    "        \n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f66bfcd2-f2b9-4603-b1f2-d4fb643c8c3c",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "clearExecutionInstances = \"\"\"\n",
+    "    PREFIX cts: <https://unsel.tetras-libre.fr/tenet/transduction-schemes#>\n",
+    "    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
+    "    PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n",
+    "\n",
+    "    DELETE {?x rdf:type ?c}\n",
+    "    WHERE {\n",
+    "        ?c rdfs:subClassOf* cts:Transduction_Schemes .\n",
+    "        ?x rdf:type ?c .\n",
+    "    }\n",
+    "\"\"\"\n",
+    "\n",
+    "addExecutionInstance = \"\"\"\n",
+    "    PREFIX cts: <https://unsel.tetras-libre.fr/tenet/transduction-schemes#>\n",
+    "    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
+    "    PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n",
+    "\n",
+    "    INSERT DATA {{<exec_instance> rdf:type {}}}\n",
+    "\"\"\"\n",
+    "\n",
+    "def owl2vowl(ttlFilePath, importList=[]):\n",
+    "    # Run java parser\n",
+    "    if importList == []:\n",
+    "        cmd = ['java', '-jar', owl2vowlPath,\n",
+    "           '-file', ttlFilePath]  \n",
+    "    else:\n",
+    "        cmd = ['java', '-jar', owl2vowlPath,\n",
+    "           '-file', ttlFilePath,\n",
+    "           '-dependencies'] + importList \n",
+    "    with Popen(cmd, stdout=PIPE, stderr=STDOUT) as p:\n",
+    "        p.wait()\n",
+    "        p.stdout.flush()\n",
+    "        if p.returncode != 0:\n",
+    "            print(\"Error in owl2vowl: \\n\\n\"+p.stdout.read().decode())\n",
+    "    outFileName = ttlFilePath.split('/')[-1].replace('ttl','json')\n",
+    "    os.rename(outFileName, '/opt/webvowl/'+outFileName)\n",
+    "    \n",
+    "def applyInferStep(uuidStr, graph, step):\n",
+    "    step_ref = \"cts:\" + step\n",
+    "    dest_file = workDir + uuidStr + '-' + step + \".ttl\"\n",
+    "    base_ref = \"http://\" + uuidStr + '/' + step\n",
+    "    graph.update(clearExecutionInstances)\n",
+    "    graph.update(addExecutionInstance.format(step_ref)) # ex. : step = 'cts:generation'\n",
+    "    graph.serialize(destination=dest_file, base=base_ref, format='turtle') # serialize graph before inference\n",
+    "    work_file = dest_file\n",
+    "    inferResult = shaclInfer(work_file, 'infer') # apply SHACL inference\n",
+    "    graph.parse(data=inferResult) # update graph with inference\n",
+    "    #graph.serialize(destination=dest_file, base=base_ref, format='turtle') # serialize graph after inference\n",
+    "    return graph, inferResult"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "40b54849-9333-4819-b953-6e816ffe474c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#######################################################################################################\n",
+    "# Validation\n",
+    "#######################################################################################################\n",
+    "def pyshaclValidate():\n",
+    "    from pyshacl import validate\n",
+    "    data_file = open('tmp.ttl').read()\n",
+    "    shapes_file = open('test-shacl-construct.shapes-order.ttl').read()\n",
+    "    conforms, v_graph, v_text = validate(data_file, shacl_graph=shapes_file)\n",
+    "    print(conforms)\n",
+    "    print(v_graph)\n",
+    "    print(v_text)\n",
+    "    \n",
+    "def loadFactoids(directory):\n",
+    "    ttlList = glob(directory+\"/*/current/*_factoid.ttl\")\n",
+    "    g = Graph()\n",
+    "    for ttl in ttlList :\n",
+    "        g.parse(ttl)\n",
+    "    g.parse('/opt/dashboards/tools/tenet/frame/system-ontology.ttl')\n",
+    "    return(g)\n",
+    "\n",
+    "def loadSentences(directory):\n",
+    "    ttlList = glob(directory+\"/*/current/*.ttl\")\n",
+    "    g = Graph()\n",
+    "    for ttl in ttlList :\n",
+    "        if 'factoid' not in ttl:\n",
+    "            g.parse(ttl)\n",
+    "    g.parse('/opt/dashboards/tools/tenet/config/unl-rdf-schema.ttl')\n",
+    "    return(g)\n",
+    "\n",
+    "possibleUnderspecificationQuery ='''\n",
+    "PREFIX owl: <http://www.w3.org/2002/07/owl#>\n",
+    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n",
+    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
+    "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n",
+    "PREFIX unl: <https://unl.tetras-libre.fr/rdf/schema#>\n",
+    "PREFIX net: <https://unsel.tetras-libre.fr/tenet/semantic-net#>\n",
+    "PREFIX cprm: <https://unsel.tetras-libre.fr/tenet/config/parameters#>\n",
+    "PREFIX req: <https://unsel.tetras-libre.fr/tenet/frame/requirement-ontology#>\n",
+    "PREFIX sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/>\n",
+    "PREFIX fprm: <https://unsel.tetras-libre.fr/tenet/frame/parameters#>\n",
+    "\n",
+    "SELECT ?thisId (CONCAT('Parle-t-on bien de tous les \"', ?entityLabel, '\" possibles ? (', ?subEntities, ')' ) AS ?message)\n",
+    "WHERE {\n",
+    "FILTER(?count>1)\n",
+    "{SELECT ?this ?thisId ?entityLabel (GROUP_CONCAT(?subEntityLabel;SEPARATOR=\", \") AS ?subEntities) (COUNT(?subEntityLabel) AS ?count)\n",
+    "WHERE {\n",
+    "\t?subEntity rdfs:subClassOf ?entity ; rdfs:label ?subEntityLabel .\n",
+    "\t{SELECT ?this ?entity ?thisId ?entityLabel\n",
+    "\tWHERE {\n",
+    "        BIND(\"##ID##\" AS ?thisId)\n",
+    "    \t?this a unl:UNL_Sentence ; unl:has_id ?thisId .\n",
+    "\t\t?entity sys:from_structure ?this ; \n",
+    "\t\t\trdfs:subClassOf+ sys:Structure ;\n",
+    "\t\t\trdfs:label ?entityLabel .\n",
+    "\t\t FILTER (\n",
+    "     \t\t!EXISTS {?subEntity1 rdfs:subClassOf ?entity; sys:from_structure ?this}\n",
+    "     )\n",
+    "\t}} \n",
+    "}\n",
+    "GROUP BY ?this ?thisId ?entityLabel }\n",
+    "}\n",
+    "'''\n",
+    "\n",
+    "possibleClassEquivalenceQuery = '''PREFIX owl: <http://www.w3.org/2002/07/owl#>\n",
+    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n",
+    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
+    "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n",
+    "PREFIX unl: <https://unl.tetras-libre.fr/rdf/schema#>\n",
+    "PREFIX net: <https://unsel.tetras-libre.fr/tenet/semantic-net#>\n",
+    "PREFIX cprm: <https://unsel.tetras-libre.fr/tenet/config/parameters#>\n",
+    "PREFIX req: <https://unsel.tetras-libre.fr/tenet/frame/requirement-ontology#>\n",
+    "PREFIX sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/>\n",
+    "PREFIX fprm: <https://unsel.tetras-libre.fr/tenet/frame/parameters#>\n",
+    "\n",
+    "SELECT (CONCAT(?messageTMP, ?sentenceList, ')') AS ?message) ?sentenceList\n",
+    "WHERE {\n",
+    "FILTER(regex(?sentenceList, \"##ID##\" ))\n",
+    "{SELECT   ?messageTMP (GROUP_CONCAT(?sentenceId; SEPARATOR=\", \")  AS ?sentenceList)\n",
+    "WHERE {\t\n",
+    "\tSELECT DISTINCT ?messageTMP ?sentenceId\n",
+    "\tWHERE {\n",
+    "\t\tFILTER (?count = 1)\n",
+    "\t\t?subEntity rdfs:subClassOf ?this ; rdfs:label ?subEntityLabel ; sys:from_structure ?subEntitySentence  .\n",
+    "\t\t?this rdfs:label ?thisLabel ; sys:from_structure ?thisSentence .\n",
+    "    \tBIND(CONCAT('\"', ?subEntityLabel, '\" est la seule sous classe de \"', ?thisLabel, '\". Ces classes sont-elles équivalentes ? <br/>(voir les exigences ') AS ?messageTMP)\n",
+    "    \t\t{\n",
+    "            ?thisSentence unl:has_id ?thisSentenceId .\n",
+    "\t\t\tBIND (?thisSentenceId AS ?sentenceId)} \n",
+    "\t\tUNION \n",
+    "\t\t\t{?subEntitySentence unl:has_id ?subEntitySentenceId .\n",
+    "\t\t\tBIND (?subEntitySentenceId AS ?sentenceId)}\n",
+    "\tFILTER(NOT EXISTS {?subEntity sys:from_structure ?thisSentence})\n",
+    "\t\t{SELECT ?this (COUNT(?subClass) AS ?count)\n",
+    "\t\tWHERE {\n",
+    "    \t    \t?this rdfs:subClassOf+ sys:Structure .\n",
+    "\t\t\t\t?subClass rdfs:subClassOf ?this\n",
+    "\t\t} GROUP BY ?this }\n",
+    "\t} ORDER BY ?sentenceId\n",
+    "} GROUP BY ?messageTMP }\n",
+    "}'''\n",
+    "\n",
+    "unfeaturedDomainOrRangeQuery = '''PREFIX owl: <http://www.w3.org/2002/07/owl#>\n",
+    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n",
+    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
+    "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n",
+    "PREFIX unl: <https://unl.tetras-libre.fr/rdf/schema#>\n",
+    "PREFIX net: <https://unsel.tetras-libre.fr/tenet/semantic-net#>\n",
+    "PREFIX cprm: <https://unsel.tetras-libre.fr/tenet/config/parameters#>\n",
+    "PREFIX req: <https://unsel.tetras-libre.fr/tenet/frame/requirement-ontology#>\n",
+    "PREFIX sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/>\n",
+    "PREFIX fprm: <https://unsel.tetras-libre.fr/tenet/frame/parameters#>\n",
+    "\n",
+    "SELECT ?sentenceId (CONCAT(?messageTMP, GROUP_CONCAT(?featureLabel ; SEPARATOR=', ')) AS ?message)\n",
+    "WHERE {\n",
+    "    SELECT DISTINCT ?sentenceId ?featureLabel (CONCAT( 'Dans cette exigence, \"', ?unfeaturedLabel, '\" pourrait être précisé par : ') AS ?messageTMP)\n",
+    "    WHERE {\n",
+    "        {\n",
+    "        ?p rdfs:subPropertyOf+ sys:Property ;\n",
+    "            rdfs:domain ?featured ; \n",
+    "            rdfs:range ?unfeatured .\n",
+    "        }\n",
+    "    UNION\n",
+    "        {\n",
+    "        ?p rdfs:subPropertyOf+ sys:Property ;\n",
+    "            rdfs:domain ?unfeatured ; \n",
+    "            rdfs:range ?featured .\n",
+    "        }\n",
+    "    #?p rdfs:label ?pLabel .\n",
+    "    ?featured sys:has_feature ?feature .\n",
+    "    FILTER(NOT EXISTS {\n",
+    "        ?unfeatured sys:has_feature ?feature\n",
+    "    })\n",
+    "    ?featuredInstance a ?featured ; sys:from_structure ?sentence.\n",
+    "    ?unfeaturedInstance a ?unfeatured ; sys:from_structure ?sentence.\n",
+    "    BIND(\"##ID##\" AS ?sentenceId)\n",
+    "    ?sentence unl:has_id ?sentenceId .\n",
+    "    FILTER(NOT EXISTS {\n",
+    "        ?featuredInstance a ?featured2 .\n",
+    "        ?featured2 rdfs:subClassOf ?featured .\n",
+    "    })\n",
+    "    FILTER(NOT EXISTS {\n",
+    "        ?unfeaturedInstance a ?unfeatured2 .\n",
+    "        ?unfeatured2 rdfs:subClassOf ?unfeatured .\n",
+    "    })\n",
+    "    ?featured rdfs:label ?featuredLabel .\n",
+    "    ?unfeatured rdfs:label ?unfeaturedLabel .\n",
+    "    ?feature rdfs:label ?featureLabel .\n",
+    "}\n",
+    "    } GROUP BY ?sentenceId ?messageTMP\n",
+    "'''\n",
+    "\n",
+    "unfeaturedDomainOrRangeWithRefQuery ='''PREFIX owl: <http://www.w3.org/2002/07/owl#>\n",
+    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n",
+    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
+    "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n",
+    "PREFIX unl: <https://unl.tetras-libre.fr/rdf/schema#>\n",
+    "PREFIX net: <https://unsel.tetras-libre.fr/tenet/semantic-net#>\n",
+    "PREFIX cprm: <https://unsel.tetras-libre.fr/tenet/config/parameters#>\n",
+    "PREFIX req: <https://unsel.tetras-libre.fr/tenet/frame/requirement-ontology#>\n",
+    "PREFIX sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/>\n",
+    "PREFIX fprm: <https://unsel.tetras-libre.fr/tenet/frame/parameters#>\n",
+    "\n",
+    "SELECT ?sentenceId (CONCAT('\"', ?unfeaturedLabel, '\" pourrait être précisé par un ou plusieurs attributs parmi : ', ?featureList, '. <br/>(exemples de référence : ', GROUP_CONCAT(?sentence2id ; SEPARATOR=', '), ').') AS ?message)  \n",
+    "WHERE {\n",
+    "{SELECT DISTINCT ?sentenceId ?unfeaturedLabel  ?sentence2id (GROUP_CONCAT(?featureLabel ; SEPARATOR=', ') AS ?featureList) #\n",
+    "WHERE {\n",
+    "    SELECT DISTINCT ?sentenceId ?sentence2id ?unfeaturedLabel ?featureLabel ?otherwiseFeaturedLabel ?featured2label\n",
+    "    WHERE {\n",
+    "        {\n",
+    "        ?p rdfs:subPropertyOf+ sys:Property ;\n",
+    "            rdfs:domain ?featured ; \n",
+    "            rdfs:range ?unfeatured .\n",
+    "        }\n",
+    "    UNION\n",
+    "        {\n",
+    "        ?p rdfs:subPropertyOf+ sys:Property ;\n",
+    "            rdfs:domain ?unfeatured ; \n",
+    "            rdfs:range ?featured .\n",
+    "        }\n",
+    "    #?p rdfs:label ?pLabel .\n",
+    "    ?featured sys:has_feature ?feature .\n",
+    "    FILTER(NOT EXISTS {\n",
+    "        ?unfeatured sys:has_feature ?feature\n",
+    "    })\n",
+    "    ?featuredInstance a ?featured ; sys:from_structure ?sentence.\n",
+    "    ?unfeaturedInstance a ?unfeatured ; sys:from_structure ?sentence.\n",
+    "    BIND(\"##ID##\" AS ?sentenceId)\n",
+    "    ?sentence unl:has_id ?sentenceId .\n",
+    "\n",
+    "\t?otherwiseFeatured rdfs:subClassOf ?unfeatured ; sys:has_feature ?feature2 ; rdfs:label ?otherwiseFeaturedLabel.\n",
+    "    ?otherwiseFeaturedInstance a ?otherwiseFeatured  ; sys:from_structure ?sentence2.\t\n",
+    "\t?sentence2 unl:has_id ?sentence2id .\n",
+    "\t{?otherwiseFeaturedInstance ?p2 ?featuredInstance2} UNION { ?featuredInstance2 ?p2 ?otherwiseFeaturedInstance}\n",
+    "\t?featuredInstance2 a ?featured2 .\n",
+    "\t?featured2  sys:has_feature ?feature2 ; rdfs:label ?featured2label.\n",
+    "\t\t\n",
+    "    FILTER(NOT EXISTS {\n",
+    "        ?featuredInstance a ?featured2 .\n",
+    "        ?featured2 rdfs:subClassOf ?featured .\n",
+    "    })\n",
+    "    FILTER(NOT EXISTS {\n",
+    "        ?unfeaturedInstance a ?unfeatured2 .\n",
+    "        ?unfeatured2 rdfs:subClassOf ?unfeatured .\n",
+    "    })\n",
+    "    ?featured rdfs:label ?featuredLabel .\n",
+    "    ?unfeatured rdfs:label ?unfeaturedLabel .\n",
+    "    ?feature rdfs:label ?featureLabel .\n",
+    "    }\n",
+    "} GROUP BY ?sentenceId ?unfeaturedLabel  ?sentence2id}\n",
+    "} GROUP BY ?sentenceId ?unfeaturedLabel  ?featureList\n",
+    "'''\n",
+    "\n",
+    "queryTypeDic = {'error':[possibleClassEquivalenceQuery], \n",
+    "                'warning':[possibleUnderspecificationQuery], \n",
+    "                'info':[unfeaturedDomainOrRangeWithRefQuery]}\n",
+    "\n",
+    "# On charge les factoïdes puis les exigences pour vérification des nouvelles phrases\n",
+    "g = loadFactoids(storeBase)\n",
+    "sparqlUpdate.setQuery('INSERT DATA {'+g.serialize(format='nt')+'}')\n",
+    "sparqlLog = sparqlUpdate.query()\n",
+    "g.serialize(destination='/opt/dashboards/store/extraction2.ttl', format='turtle')\n",
+    "owl2vowl('/opt/dashboards/store/extraction2.ttl')\n",
+    "\n",
+    "g = loadSentences(storeBase)\n",
+    "sparqlUpdate.setQuery('INSERT DATA {'+g.serialize(format='nt')+'}')\n",
+    "sparqlLog = sparqlUpdate.query()\n",
+    "\n",
+    "# Requête de post-traitement pour propager les instances aux sous classes pour chaque phrase\n",
+    "# -> Il faudra sans doute revoir la classe qui accueille les instances à l'extraction\n",
+    "def instToSubclasses():\n",
+    "    instToSubclassesQuery = \"\"\"PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
+    "PREFIX sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/>\n",
+    "INSERT {?inst a ?subClass .}\n",
+    "WHERE {  \n",
+    "  ?class rdfs:subClassOf* sys:Structure ; sys:from_structure ?sentence .\n",
+    "  ?subClass rdfs:subClassOf+ ?class ; sys:from_structure ?sentence .  \n",
+    "  ?inst a ?class ; sys:from_structure ?sentence .  \n",
+    "}\"\"\"\n",
+    "    sparqlUpdate.setQuery(instToSubclassesQuery)\n",
+    "    sparqlLog = sparqlUpdate.query()\n",
+    "\n",
+    "instToSubclasses()\n",
+    "    \n",
+    "#factoidList = [x.replace('..','/opt/dashboards') for x in glob(storeBase+\"/*/current/*factoid.ttl\")]\n",
+    "#owl2vowl(FRAME_DIR + frame_file, importList=factoidList)\n",
+    "\n",
+    "def createVerificationMessages(directory):\n",
+    "    reqId = directory.split('/')[-1]\n",
+    "    for messageType in ['error', 'warning', 'info']:\n",
+    "        #print(messageType)\n",
+    "        messagesStr = ''\n",
+    "        for queryTMP in queryTypeDic[messageType] :\n",
+    "            query = queryTMP.replace(\"##ID##\",reqId)\n",
+    "            sparqlQuery.setQuery(query)\n",
+    "            results = sparqlQuery.query().convert()\n",
+    "            for result in results[\"results\"][\"bindings\"]:\n",
+    "                message = result[\"message\"][\"value\"]\n",
+    "                if message not in messagesStr :\n",
+    "                    #print(\"printing\")\n",
+    "                    messagesStr+=message+\"\\n\"\n",
+    "        with open(directory+'/current/'+reqId+'_'+messageType+'Messages.txt', 'w') as outFile:\n",
+    "            outFile.write(messagesStr)\n",
+    "\n",
+    "def createOnto(uuidStr): \n",
+    "    # -- Initialization (creation of extraction graph)\n",
+    "    step = 'init_graph'\n",
+    "    dest_file = workDir + uuidStr + '-' + step + \".ttl\"\n",
+    "    base_ref = \"http://\" + uuidStr + '/' + step\n",
+    "    graph = createTenetGraph(uuidStr)\n",
+    "    graph.serialize(destination=dest_file, base=base_ref, format='turtle')   \n",
+    "    # -- Extraction\n",
+    "    graph, _ = applyInferStep(uuidStr, graph, 'preprocessing')\n",
+    "    graph, _ = applyInferStep(uuidStr, graph, 'net_extension')\n",
+    "    graph, finalInferResult = applyInferStep(uuidStr, graph, 'generation_dga_patch')\n",
+    "    # -- Write result \n",
+    "    factoidPath = storeBase+uuidStr+'/current/'+uuidStr+'_factoid.ttl'\n",
+    "    sentencePath = storeBase+uuidStr+'/current/'+uuidStr+'.ttl'\n",
+    "    with open(factoidPath, 'w') as outfile:\n",
+    "         outfile.write(finalInferResult)\n",
+    "    # -- Webvowl\n",
+    "    owl2vowl(factoidPath)\n",
+    "    # -- Verification\n",
+    "    factoidGraph = Graph()\n",
+    "    factoidGraph.parse(factoidPath)\n",
+    "    factoidGraph.parse(sentencePath)    \n",
+    "    sparqlUpdate.setQuery('INSERT DATA {'+factoidGraph.serialize(format='nt')+'}')\n",
+    "    sparqlLog = sparqlUpdate.query()\n",
+    "    instToSubclasses()\n",
+    "    createVerificationMessages(storeBase+uuidStr)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "744abdb9-b3d6-4025-abc9-2f749644c3ed",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# Fonctions pour la mise à jour globale du corpus (ne pas exécuter en mode \"Tableau de bord\")\n",
+    "\n",
+    "def updateAllFactoids():\n",
+    "    dirList = ['/opt/dashboards/store/CCTP-SRSA-IP-20210831/SRSA-IP_STB_PHON_00100']\n",
+    "    #dirList = glob('/opt/dashboards/store/CCTP-SRSA-IP-20210831/*')\n",
+    "    for directory in dirList:\n",
+    "        if directory.split('/')[-1] != '0_NONE':\n",
+    "            print(directory)\n",
+    "            reqId = directory.split('/')[-1]\n",
+    "            createOnto(reqId)\n",
+    "\n",
+    "#updateAllFactoids()\n",
+    "        \n",
+    "        \n",
+    "def updateAllVerificationMessages():\n",
+    "    #dirList = ['/opt/dashboards/store/CCTP-SRSA-IP-20210831/SRSA-IP_STB_PHON_00100']\n",
+    "    dirList = glob('/opt/dashboards/store/CCTP-SRSA-IP-20210831/*')\n",
+    "    for directory in dirList:\n",
+    "        if directory.split('/')[-1] != '0_NONE':\n",
+    "            print(directory)\n",
+    "            createVerificationMessages(directory)   \n",
+    "\n",
+    "#updateAllVerificationMessages()\n",
+    "\n",
+    "            \n",
+    "#query = possibleUnderspecificationQuery.replace(\"##ID##\",\"SRSA-IP_STB_PHON_00500\")\n",
+    "\n",
+    "#for r in g.query(query):\n",
+    "    #print(r['message'])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ca43f1f2-42ef-4355-a2e2-e27351a51b96",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#######################################################################################################\n",
+    "# Navigateur / éditeur de corpus UNL\n",
+    "#######################################################################################################\n",
+    "\n",
+    "saveButtonClicks = 0\n",
+    "\n",
+    "def main_pane(directory):    \n",
+    "    saveButtonClicks = 0\n",
+    "    saveButton = pn.widgets.Button(name='Enregistrer', button_type='success', width = 100)\n",
+    "    saveButtonDic = dict(button=saveButton)\n",
+    "    saveCommentButton = pn.widgets.Button(name='Enregistrer', button_type='success', width = 100)\n",
+    "   \n",
+    "    path = storeBase+directory+'/current/'\n",
+    "    pathOrig = storeBase+directory+'/orig/'\n",
+    "    svgPath = path+directory+'.svg'\n",
+    "    pngPath = path+directory+'.png'\n",
+    "    unlPath = path+directory+'.unl'\n",
+    "    rdfPath = path+directory+'.ttl'\n",
+    "    commentPath = path+directory+'.comments'\n",
+    "    with open(commentPath) as commentFile:\n",
+    "        commentStr = commentFile.read()\n",
+    "    with open(unlPath) as unlFile:\n",
+    "        unlStr = unlFile.read()\n",
+    "    svgPathOrig = pathOrig+directory+'.svg'\n",
+    "    pngPathOrig = pathOrig+directory+'.png'\n",
+    "    unlPathOrig = pathOrig+directory+'.unl'\n",
+    "    rdfPathOrig = pathOrig+directory+'.ttl'\n",
+    "    with open(unlPathOrig) as unlFileOrig:\n",
+    "        unlStrOrig = unlFileOrig.read()\n",
+    "    unlHtmlOrig = unlStrOrig.replace(\"\\n\",\"<br/>\")\n",
+    "    if unlStrOrig == unlStr:\n",
+    "        modIndicator = ''\n",
+    "    else:\n",
+    "        modIndicator = ' <u>modifié</u>'\n",
+    "    regexFr = re.compile(\"{org:fr}\\n(.*?)\\n{/org}\",re.MULTILINE|re.DOTALL)\n",
+    "    try:\n",
+    "        frStr = regexFr.search(unlStr).group(1)\n",
+    "    except AttributeError:\n",
+    "        frStr = ''\n",
+    "    regexEn = re.compile(\"{en}\\n(.*?)\\n{/en}\",re.MULTILINE|re.DOTALL)\n",
+    "    try:\n",
+    "        enStr = regexEn.search(unlStr).group(1)  \n",
+    "    except AttributeError:\n",
+    "        enStr = ''\n",
+    "        \n",
+    "    unlOrig_html = pn.pane.HTML(unlHtmlOrig)\n",
+    "    unl_input = pn.widgets.input.TextAreaInput(height=400)\n",
+    "    unl_input.value = unlStr\n",
+    "    comment_input = pn.widgets.input.TextAreaInput(height=300)\n",
+    "    comment_input.value = commentStr\n",
+    "    \n",
+    "    downloadSvg = pn.widgets.FileDownload(sizing_mode='stretch_width', file=svgPath, embed=True, name='Télécharger le graphe en SVG :')\n",
+    "    downloadPng = pn.widgets.FileDownload(sizing_mode='stretch_width', file=pngPath, embed=True, name='Télécharger le graphe en PNG :')    \n",
+    "    downloadRdf = pn.widgets.FileDownload(sizing_mode='stretch_width', file=rdfPath, embed=True, name='Télécharger le code UNL-RDF :')\n",
+    "    downloadUnl = pn.widgets.FileDownload(sizing_mode='stretch_width', file=unlPath, embed=True, name='Télécharger le code UNL :')\n",
+    "    \n",
+    "    def compute_unl_graph_pane(button):\n",
+    "        global saveButtonClicks\n",
+    "        if saveButtonClicks != 0:\n",
+    "            writeUnlFiles(unl_input.value, storeBase+selectDir.value+'/current/'+selectDir.value)\n",
+    "        pane = pn.pane.PNG(pngPath, width = pane_width)\n",
+    "        saveButtonClicks += 1\n",
+    "        return(pane)\n",
+    "    \n",
+    "    unl_graph_pane = pn.interact(lambda button : compute_unl_graph_pane(button), **saveButtonDic)\n",
+    "    \n",
+    "    \n",
+    "    \n",
+    "    warnColumn = pn.Card(width=pane_width, header = \"**Alertes pour l'exigence :**\")\n",
+    "    warnings = 0\n",
+    "    try:    \n",
+    "        with open(storeBase+directory+'/current/'+directory+'_errorMessages.txt','r') as errorfile:\n",
+    "            errorlist = errorfile.readlines()\n",
+    "        for error in errorlist: \n",
+    "            warnColumn.append(pn.pane.HTML('<div class=\"alert alert-danger\">{}</div>'.format(error)))\n",
+    "        if errorlist != [] :  \n",
+    "            warnings = 1\n",
+    "    except OSError:\n",
+    "        pass\n",
+    "    try:    \n",
+    "        with open(storeBase+directory+'/current/'+directory+'_warningMessages.txt','r') as warnfile:\n",
+    "            warnlist = warnfile.readlines()\n",
+    "        for warn in warnlist: \n",
+    "            warnColumn.append(pn.pane.HTML('<div class=\"alert alert-warning\">{}</div>'.format(warn)))\n",
+    "        if warnlist != [] :  \n",
+    "            warnings = 1\n",
+    "    except OSError:\n",
+    "        pass \n",
+    "    try:    \n",
+    "        with open(storeBase+directory+'/current/'+directory+'_infoMessages.txt','r') as infofile:\n",
+    "            infolist = infofile.readlines()\n",
+    "        for info in infolist: \n",
+    "            warnColumn.append(pn.pane.HTML('<div class=\"alert alert-info\">{}</div>'.format(info)))\n",
+    "        if infolist != [] :  \n",
+    "            warnings = 1\n",
+    "    except OSError:\n",
+    "        pass\n",
+    "    if warnings == 0:\n",
+    "            warnColumn.append(pn.pane.HTML('<div class=\"alert alert-info\">Pas d\\'anomalie détectée</div>'))\n",
+    "                             \n",
+    "    pane = pn.Column(\n",
+    "            pn.Row(\n",
+    "                pn.pane.HTML('<a href=\"https://unsel.tetras-lab.io/webvowl#{}_factoid\" target=\"_blank\"><button type=\"button\" class=\"btn btn-outline-secondary btn-sm\">Visualiser l\\'ontologie construite</button><a>'.format(directory)),\n",
+    "                pn.Column(pn.pane.HTML('<font size=\"tiny\">Exigence sélectionnée : '+directory+'</font>'), sizing_mode='stretch_width'),  \n",
+    "                ),\n",
+    "            #pn.Card(\n",
+    "            #    pn.pane.HTML('''<iframe id=\"inlineFrameExample\"\n",
+    "            #        title=\"Inline Frame Example\"\n",
+    "            #        width=\"{}\"\n",
+    "            #        height=\"800\"\n",
+    "            #        src=\"https://unsel.tetras-lab.io/webvowl/#{}_factoid\">\n",
+    "            #        </iframe>'''.format(pane_width,selectDir.value)),\n",
+    "            #        title = \"Visualiser le factoid\", width=pane_width, collapsed=True),\n",
+    "            warnColumn,\n",
+    "            pn.pane.HTML('FR : '+frStr),\n",
+    "            pn.pane.HTML('EN : '+enStr),\n",
+    "            unl_graph_pane[1],\n",
+    "            pn.Card(pn.Column(saveCommentButton, comment_input, width = pane_width),\n",
+    "                    header='**Commentaires**', \n",
+    "                    collapsed=True, width = pane_width),     \n",
+    "            pn.Card(pn.Column(saveButton, unl_input, width = pane_width),\n",
+    "                    header='**Code UNL**'+modIndicator, \n",
+    "                    collapsed=True, width = pane_width),\n",
+    "            pn.Card(pn.Column(\n",
+    "                        unlOrig_html,\n",
+    "                        pn.Card(pn.pane.PNG(pngPathOrig, width = pane_width-20), header = \"**Graphe d'origine**\", width=pane_width-10)\n",
+    "                    ), \n",
+    "                    header=\"**Code UNL d'origine**\", \n",
+    "                    collapsed=True, width = pane_width),\n",
+    "            pn.WidgetBox(\n",
+    "                pn.Row(downloadPng, downloadSvg),\n",
+    "                pn.Row(downloadUnl, downloadRdf),\n",
+    "                width=pane_width,\n",
+    "            ),\n",
+    "            width=pane_width, \n",
+    "        )\n",
+    "    \n",
+    "    def saveComments(event):\n",
+    "        with open(commentPath, 'w') as commentFile:\n",
+    "            commentFile.write(comment_input.value)\n",
+    "    saveCommentButton.on_click(saveComments)  \n",
+    "    return(pane)\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5d4ec56e-d0bb-44c8-975b-49d409b6b160",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#######################################################################################################\n",
+    "# Navigation Interface\n",
+    "#######################################################################################################\n",
+    "\n",
+    "pathList = glob(storeBase+'*')\n",
+    "dirList = sorted([x.split('/')[-1] for x in pathList])\n",
+    "#warningList = sorted(list(errorReqDic.keys())+list(warningReqDic.keys())+list(infoReqDic.keys()))\n",
+    "warningList = []\n",
+    "\n",
+    "dirDic = {}\n",
+    "\n",
+    "for directory in dirList:\n",
+    "    if directory in warningList:\n",
+    "        dirDic['**'+directory] = directory\n",
+    "    else:\n",
+    "        dirDic[directory] = directory\n",
+    "\n",
+    "dirDic = dict(sorted(dirDic.items()))\n",
+    "        \n",
+    "selectDir = pn.widgets.Select(name='Sélectionnez une exigence : ', options=dirDic, width = 350)\n",
+    "selectDir.value = '0_NONE'\n",
+    "dir_selector = dict(directory=selectDir)#, save=saveButton)\n",
+    "\n",
+    "#######################################################################################################\n",
+    "# Analyse avec UNseL-inter\n",
+    "#######################################################################################################\n",
+    "def extractOnClick(event):\n",
+    "    uuidStr = \"COMP\"+str(uuid.uuid4())\n",
+    "    createFolderFromUnselInter(uuidStr)\n",
+    "    createOnto(uuidStr)    \n",
+    "    selectDir.options[uuidStr] = uuidStr\n",
+    "    selectDir.value = uuidStr\n",
+    "##################################################\n",
+    "    \n",
+    "buttonExtract = pn.widgets.Button(name=\"Construire l'ontologie de la phrase (factoïd)\", width=300)\n",
+    "buttonExtract.param.watch(extractOnClick, 'clicks')\n",
+    "\n",
+    "\n",
+    "unlNavigatorPane = pn.interact(lambda directory : main_pane(directory), **dir_selector)\n",
+    "\n",
+    "#unl_graph_pane = pn.interact(lambda button : compute_unl_graph_pane(button), **saveButtonDic)\n",
+    "\n",
+    "# Main interface\n",
+    "pn.Column(\n",
+    "pn.Card( pn.Row(\n",
+    "    pn.Spacer(sizing_mode='stretch_width'),\n",
+    "    pn.Column(\n",
+    "    pn.pane.HTML('''<iframe id=\"inlineFrameExample\"\n",
+    "        title=\"Inline Frame Example\"\n",
+    "        width=\"1000\"\n",
+    "        height=\"600\"\n",
+    "        src=\"https://lingwarium.org/UNseL-inter/\">\n",
+    "    </iframe>'''),\n",
+    "    buttonExtract),\n",
+    "    pn.Spacer(sizing_mode='stretch_width'),\n",
+    "    ),    \n",
+    "    title = \"Analyser une nouvelle exigence\", width=pane_width+50, collapsed=True,),\n",
+    "    pn.layout.Divider(),\n",
+    "    pn.Card( \n",
+    "        pn.Row(\n",
+    "            pn.layout.HSpacer(),\n",
+    "            pn.Column(\n",
+    "                selectDir,\n",
+    "                unlNavigatorPane[1],\n",
+    "            ),\n",
+    "            pn.layout.HSpacer(),\n",
+    "        ),\n",
+    "        title = \"Naviguer dans les exigences\", width=pane_width+50, collapsed=True\n",
+    "    ),\n",
+    "    pn.layout.Divider(), \n",
+    "    pn.Card(\n",
+    "        pn.pane.HTML('''<iframe id=\"inlineFrameExample\"\n",
+    "        title=\"Inline Frame Example\"\n",
+    "        width=\"{}\"\n",
+    "        height=\"800\"\n",
+    "        src=\"https://unsel.tetras-lab.io/webvowl/#extraction2\">\n",
+    "    </iframe>'''.format(pane_width)),\n",
+    "        pn.Row(\n",
+    "            #pn.pane.HTML('<a href=\"https://unsel.tetras-lab.io/webvowl#extraction\" target=\"_blank\"><button type=\"button\" class=\"btn btn-outline-secondary btn-sm\">Visualiser l\\'ontologie construite pour tout le corpus</button><a>'),\n",
+    "            #pn.pane.HTML('<a href=\"https://unsel.tetras-lab.io/static/webvowl_1.1.7/index.html#extraction_SRSA-IP_STB_PHON_00100\" target=\"_blank\"><button type=\"button\" class=\"btn btn-outline-secondary btn-sm\">Visualiser l\\'extraction de SRSA-IP_STB_PHON_00100</button><a>'),            \n",
+    "            pn.widgets.FileDownload(file='/opt/dashboards/static/extraction.ttl', embed=True)\n",
+    "         ), title = \"Résultats sur le corpus\", width=pane_width+50, collapsed=True\n",
+    "    ), \n",
+    ")\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e06f5381-6c2d-4762-bcb9-a914fb5889e3",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#list = glob('/opt/dashboards/store/CCTP-SRSA-IP-20210831/*')\n",
+    "#for d in list : \n",
+    "#    print(d)\n",
+    "#    uuidStr = d.replace('/opt/dashboards/store/CCTP-SRSA-IP-20210831/','')\n",
+    "#    createOnto(uuidStr)"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.8.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}