diff --git a/unsel_dashboard-iterate.ipynb b/unsel_dashboard-iterate.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..2a3114d4960a2bba74e6d729c274ddcc96741522
--- /dev/null
+++ b/unsel_dashboard-iterate.ipynb
@@ -0,0 +1,1604 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "160ab8ab-091e-4b00-b63a-c3746c71c540",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "application/javascript": [
+       "\n",
+       "(function(root) {\n",
+       "  function now() {\n",
+       "    return new Date();\n",
+       "  }\n",
+       "\n",
+       "  var force = true;\n",
+       "\n",
+       "  if (typeof root._bokeh_onload_callbacks === \"undefined\" || force === true) {\n",
+       "    root._bokeh_onload_callbacks = [];\n",
+       "    root._bokeh_is_loading = undefined;\n",
+       "  }\n",
+       "\n",
+       "  if (typeof (root._bokeh_timeout) === \"undefined\" || force === true) {\n",
+       "    root._bokeh_timeout = Date.now() + 5000;\n",
+       "    root._bokeh_failed_load = false;\n",
+       "  }\n",
+       "\n",
+       "  function run_callbacks() {\n",
+       "    try {\n",
+       "      root._bokeh_onload_callbacks.forEach(function(callback) {\n",
+       "        if (callback != null)\n",
+       "          callback();\n",
+       "      });\n",
+       "    } finally {\n",
+       "      delete root._bokeh_onload_callbacks\n",
+       "    }\n",
+       "    console.debug(\"Bokeh: all callbacks have finished\");\n",
+       "  }\n",
+       "\n",
+       "  function load_libs(css_urls, js_urls, js_modules, callback) {\n",
+       "    if (css_urls == null) css_urls = [];\n",
+       "    if (js_urls == null) js_urls = [];\n",
+       "    if (js_modules == null) js_modules = [];\n",
+       "\n",
+       "    root._bokeh_onload_callbacks.push(callback);\n",
+       "    if (root._bokeh_is_loading > 0) {\n",
+       "      console.debug(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n",
+       "      return null;\n",
+       "    }\n",
+       "    if (js_urls.length === 0 && js_modules.length === 0) {\n",
+       "      run_callbacks();\n",
+       "      return null;\n",
+       "    }\n",
+       "    console.debug(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n",
+       "    root._bokeh_is_loading = css_urls.length + js_urls.length + js_modules.length;\n",
+       "\n",
+       "    function on_load() {\n",
+       "      root._bokeh_is_loading--;\n",
+       "      if (root._bokeh_is_loading === 0) {\n",
+       "        console.debug(\"Bokeh: all BokehJS libraries/stylesheets loaded\");\n",
+       "        run_callbacks()\n",
+       "      }\n",
+       "    }\n",
+       "\n",
+       "    function on_error() {\n",
+       "      console.error(\"failed to load \" + url);\n",
+       "    }\n",
+       "\n",
+       "    for (var i = 0; i < css_urls.length; i++) {\n",
+       "      var url = css_urls[i];\n",
+       "      const element = document.createElement(\"link\");\n",
+       "      element.onload = on_load;\n",
+       "      element.onerror = on_error;\n",
+       "      element.rel = \"stylesheet\";\n",
+       "      element.type = \"text/css\";\n",
+       "      element.href = url;\n",
+       "      console.debug(\"Bokeh: injecting link tag for BokehJS stylesheet: \", url);\n",
+       "      document.body.appendChild(element);\n",
+       "    }\n",
+       "\n",
+       "    var skip = [];\n",
+       "    if (window.requirejs) {\n",
+       "      window.requirejs.config({'packages': {}, 'paths': {}, 'shim': {}});\n",
+       "      \n",
+       "    }\n",
+       "    for (var i = 0; i < js_urls.length; i++) {\n",
+       "      var url = js_urls[i];\n",
+       "      if (skip.indexOf(url) >= 0) { on_load(); continue; }\n",
+       "      var element = document.createElement('script');\n",
+       "      element.onload = on_load;\n",
+       "      element.onerror = on_error;\n",
+       "      element.async = false;\n",
+       "      element.src = url;\n",
+       "      console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n",
+       "      document.head.appendChild(element);\n",
+       "    }\n",
+       "    for (var i = 0; i < js_modules.length; i++) {\n",
+       "      var url = js_modules[i];\n",
+       "      if (skip.indexOf(url) >= 0) { on_load(); continue; }\n",
+       "      var element = document.createElement('script');\n",
+       "      element.onload = on_load;\n",
+       "      element.onerror = on_error;\n",
+       "      element.async = false;\n",
+       "      element.src = url;\n",
+       "      element.type = \"module\";\n",
+       "      console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n",
+       "      document.head.appendChild(element);\n",
+       "    }\n",
+       "    if (!js_urls.length && !js_modules.length) {\n",
+       "      on_load()\n",
+       "    }\n",
+       "  };\n",
+       "\n",
+       "  function inject_raw_css(css) {\n",
+       "    const element = document.createElement(\"style\");\n",
+       "    element.appendChild(document.createTextNode(css));\n",
+       "    document.body.appendChild(element);\n",
+       "  }\n",
+       "\n",
+       "  var js_urls = [\"https://cdn.bokeh.org/bokeh/release/bokeh-2.4.1.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-gl-2.4.1.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-widgets-2.4.1.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-tables-2.4.1.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-mathjax-2.4.1.min.js\", \"https://unpkg.com/@holoviz/panel@0.12.4/dist/panel.min.js\"];\n",
+       "  var js_modules = [];\n",
+       "  var css_urls = [\"https://unpkg.com/@holoviz/panel@0.12.4/dist/css/widgets.css\", \"https://unpkg.com/@holoviz/panel@0.12.4/dist/css/card.css\", \"https://unpkg.com/@holoviz/panel@0.12.4/dist/css/dataframe.css\", \"https://unpkg.com/@holoviz/panel@0.12.4/dist/css/alerts.css\", \"https://unpkg.com/@holoviz/panel@0.12.4/dist/css/loading.css\", \"https://unpkg.com/@holoviz/panel@0.12.4/dist/css/markdown.css\", \"https://unpkg.com/@holoviz/panel@0.12.4/dist/css/json.css\"];\n",
+       "  var inline_js = [\n",
+       "    function(Bokeh) {\n",
+       "      inject_raw_css(\"\\n    .bk.pn-loading.arcs:before {\\n      background-image: url(\\\"data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHhtbG5zOnhsaW5rPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5L3hsaW5rIiBzdHlsZT0ibWFyZ2luOiBhdXRvOyBiYWNrZ3JvdW5kOiBub25lOyBkaXNwbGF5OiBibG9jazsgc2hhcGUtcmVuZGVyaW5nOiBhdXRvOyIgdmlld0JveD0iMCAwIDEwMCAxMDAiIHByZXNlcnZlQXNwZWN0UmF0aW89InhNaWRZTWlkIj4gIDxjaXJjbGUgY3g9IjUwIiBjeT0iNTAiIHI9IjMyIiBzdHJva2Utd2lkdGg9IjgiIHN0cm9rZT0iI2MzYzNjMyIgc3Ryb2tlLWRhc2hhcnJheT0iNTAuMjY1NDgyNDU3NDM2NjkgNTAuMjY1NDgyNDU3NDM2NjkiIGZpbGw9Im5vbmUiIHN0cm9rZS1saW5lY2FwPSJyb3VuZCI+ICAgIDxhbmltYXRlVHJhbnNmb3JtIGF0dHJpYnV0ZU5hbWU9InRyYW5zZm9ybSIgdHlwZT0icm90YXRlIiByZXBlYXRDb3VudD0iaW5kZWZpbml0ZSIgZHVyPSIxcyIga2V5VGltZXM9IjA7MSIgdmFsdWVzPSIwIDUwIDUwOzM2MCA1MCA1MCI+PC9hbmltYXRlVHJhbnNmb3JtPiAgPC9jaXJjbGU+PC9zdmc+\\\")\\n    }\\n    \");\n",
+       "    },\n",
+       "    function(Bokeh) {\n",
+       "      Bokeh.set_log_level(\"info\");\n",
+       "    },\n",
+       "    function(Bokeh) {} // ensure no trailing comma for IE\n",
+       "  ];\n",
+       "\n",
+       "  function run_inline_js() {\n",
+       "    if ((root.Bokeh !== undefined) || (force === true)) {\n",
+       "      for (var i = 0; i < inline_js.length; i++) {\n",
+       "        inline_js[i].call(root, root.Bokeh);\n",
+       "      }} else if (Date.now() < root._bokeh_timeout) {\n",
+       "      setTimeout(run_inline_js, 100);\n",
+       "    } else if (!root._bokeh_failed_load) {\n",
+       "      console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n",
+       "      root._bokeh_failed_load = true;\n",
+       "    }\n",
+       "  }\n",
+       "\n",
+       "  if (root._bokeh_is_loading === 0) {\n",
+       "    console.debug(\"Bokeh: BokehJS loaded, going straight to plotting\");\n",
+       "    run_inline_js();\n",
+       "  } else {\n",
+       "    load_libs(css_urls, js_urls, js_modules, function() {\n",
+       "      console.debug(\"Bokeh: BokehJS plotting callback run at\", now());\n",
+       "      run_inline_js();\n",
+       "    });\n",
+       "  }\n",
+       "}(window));"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "application/javascript": [
+       "\n",
+       "if ((window.PyViz === undefined) || (window.PyViz instanceof HTMLElement)) {\n",
+       "  window.PyViz = {comms: {}, comm_status:{}, kernels:{}, receivers: {}, plot_index: []}\n",
+       "}\n",
+       "\n",
+       "\n",
+       "    function JupyterCommManager() {\n",
+       "    }\n",
+       "\n",
+       "    JupyterCommManager.prototype.register_target = function(plot_id, comm_id, msg_handler) {\n",
+       "      if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n",
+       "        var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n",
+       "        comm_manager.register_target(comm_id, function(comm) {\n",
+       "          comm.on_msg(msg_handler);\n",
+       "        });\n",
+       "      } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n",
+       "        window.PyViz.kernels[plot_id].registerCommTarget(comm_id, function(comm) {\n",
+       "          comm.onMsg = msg_handler;\n",
+       "        });\n",
+       "      } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n",
+       "        google.colab.kernel.comms.registerTarget(comm_id, (comm) => {\n",
+       "          var messages = comm.messages[Symbol.asyncIterator]();\n",
+       "          function processIteratorResult(result) {\n",
+       "            var message = result.value;\n",
+       "            console.log(message)\n",
+       "            var content = {data: message.data, comm_id};\n",
+       "            var buffers = []\n",
+       "            for (var buffer of message.buffers || []) {\n",
+       "              buffers.push(new DataView(buffer))\n",
+       "            }\n",
+       "            var metadata = message.metadata || {};\n",
+       "            var msg = {content, buffers, metadata}\n",
+       "            msg_handler(msg);\n",
+       "            return messages.next().then(processIteratorResult);\n",
+       "          }\n",
+       "          return messages.next().then(processIteratorResult);\n",
+       "        })\n",
+       "      }\n",
+       "    }\n",
+       "\n",
+       "    JupyterCommManager.prototype.get_client_comm = function(plot_id, comm_id, msg_handler) {\n",
+       "      if (comm_id in window.PyViz.comms) {\n",
+       "        return window.PyViz.comms[comm_id];\n",
+       "      } else if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n",
+       "        var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n",
+       "        var comm = comm_manager.new_comm(comm_id, {}, {}, {}, comm_id);\n",
+       "        if (msg_handler) {\n",
+       "          comm.on_msg(msg_handler);\n",
+       "        }\n",
+       "      } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n",
+       "        var comm = window.PyViz.kernels[plot_id].connectToComm(comm_id);\n",
+       "        comm.open();\n",
+       "        if (msg_handler) {\n",
+       "          comm.onMsg = msg_handler;\n",
+       "        }\n",
+       "      } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n",
+       "        var comm_promise = google.colab.kernel.comms.open(comm_id)\n",
+       "        comm_promise.then((comm) => {\n",
+       "          window.PyViz.comms[comm_id] = comm;\n",
+       "          if (msg_handler) {\n",
+       "            var messages = comm.messages[Symbol.asyncIterator]();\n",
+       "            function processIteratorResult(result) {\n",
+       "              var message = result.value;\n",
+       "              var content = {data: message.data};\n",
+       "              var metadata = message.metadata || {comm_id};\n",
+       "              var msg = {content, metadata}\n",
+       "              msg_handler(msg);\n",
+       "              return messages.next().then(processIteratorResult);\n",
+       "            }\n",
+       "            return messages.next().then(processIteratorResult);\n",
+       "          }\n",
+       "        }) \n",
+       "        var sendClosure = (data, metadata, buffers, disposeOnDone) => {\n",
+       "          return comm_promise.then((comm) => {\n",
+       "            comm.send(data, metadata, buffers, disposeOnDone);\n",
+       "          });\n",
+       "        };\n",
+       "        var comm = {\n",
+       "          send: sendClosure\n",
+       "        };\n",
+       "      }\n",
+       "      window.PyViz.comms[comm_id] = comm;\n",
+       "      return comm;\n",
+       "    }\n",
+       "    window.PyViz.comm_manager = new JupyterCommManager();\n",
+       "    \n",
+       "\n",
+       "\n",
+       "var JS_MIME_TYPE = 'application/javascript';\n",
+       "var HTML_MIME_TYPE = 'text/html';\n",
+       "var EXEC_MIME_TYPE = 'application/vnd.holoviews_exec.v0+json';\n",
+       "var CLASS_NAME = 'output';\n",
+       "\n",
+       "/**\n",
+       " * Render data to the DOM node\n",
+       " */\n",
+       "function render(props, node) {\n",
+       "  var div = document.createElement(\"div\");\n",
+       "  var script = document.createElement(\"script\");\n",
+       "  node.appendChild(div);\n",
+       "  node.appendChild(script);\n",
+       "}\n",
+       "\n",
+       "/**\n",
+       " * Handle when a new output is added\n",
+       " */\n",
+       "function handle_add_output(event, handle) {\n",
+       "  var output_area = handle.output_area;\n",
+       "  var output = handle.output;\n",
+       "  if ((output.data == undefined) || (!output.data.hasOwnProperty(EXEC_MIME_TYPE))) {\n",
+       "    return\n",
+       "  }\n",
+       "  var id = output.metadata[EXEC_MIME_TYPE][\"id\"];\n",
+       "  var toinsert = output_area.element.find(\".\" + CLASS_NAME.split(' ')[0]);\n",
+       "  if (id !== undefined) {\n",
+       "    var nchildren = toinsert.length;\n",
+       "    var html_node = toinsert[nchildren-1].children[0];\n",
+       "    html_node.innerHTML = output.data[HTML_MIME_TYPE];\n",
+       "    var scripts = [];\n",
+       "    var nodelist = html_node.querySelectorAll(\"script\");\n",
+       "    for (var i in nodelist) {\n",
+       "      if (nodelist.hasOwnProperty(i)) {\n",
+       "        scripts.push(nodelist[i])\n",
+       "      }\n",
+       "    }\n",
+       "\n",
+       "    scripts.forEach( function (oldScript) {\n",
+       "      var newScript = document.createElement(\"script\");\n",
+       "      var attrs = [];\n",
+       "      var nodemap = oldScript.attributes;\n",
+       "      for (var j in nodemap) {\n",
+       "        if (nodemap.hasOwnProperty(j)) {\n",
+       "          attrs.push(nodemap[j])\n",
+       "        }\n",
+       "      }\n",
+       "      attrs.forEach(function(attr) { newScript.setAttribute(attr.name, attr.value) });\n",
+       "      newScript.appendChild(document.createTextNode(oldScript.innerHTML));\n",
+       "      oldScript.parentNode.replaceChild(newScript, oldScript);\n",
+       "    });\n",
+       "    if (JS_MIME_TYPE in output.data) {\n",
+       "      toinsert[nchildren-1].children[1].textContent = output.data[JS_MIME_TYPE];\n",
+       "    }\n",
+       "    output_area._hv_plot_id = id;\n",
+       "    if ((window.Bokeh !== undefined) && (id in Bokeh.index)) {\n",
+       "      window.PyViz.plot_index[id] = Bokeh.index[id];\n",
+       "    } else {\n",
+       "      window.PyViz.plot_index[id] = null;\n",
+       "    }\n",
+       "  } else if (output.metadata[EXEC_MIME_TYPE][\"server_id\"] !== undefined) {\n",
+       "    var bk_div = document.createElement(\"div\");\n",
+       "    bk_div.innerHTML = output.data[HTML_MIME_TYPE];\n",
+       "    var script_attrs = bk_div.children[0].attributes;\n",
+       "    for (var i = 0; i < script_attrs.length; i++) {\n",
+       "      toinsert[toinsert.length - 1].childNodes[1].setAttribute(script_attrs[i].name, script_attrs[i].value);\n",
+       "    }\n",
+       "    // store reference to server id on output_area\n",
+       "    output_area._bokeh_server_id = output.metadata[EXEC_MIME_TYPE][\"server_id\"];\n",
+       "  }\n",
+       "}\n",
+       "\n",
+       "/**\n",
+       " * Handle when an output is cleared or removed\n",
+       " */\n",
+       "function handle_clear_output(event, handle) {\n",
+       "  var id = handle.cell.output_area._hv_plot_id;\n",
+       "  var server_id = handle.cell.output_area._bokeh_server_id;\n",
+       "  if (((id === undefined) || !(id in PyViz.plot_index)) && (server_id !== undefined)) { return; }\n",
+       "  var comm = window.PyViz.comm_manager.get_client_comm(\"hv-extension-comm\", \"hv-extension-comm\", function () {});\n",
+       "  if (server_id !== null) {\n",
+       "    comm.send({event_type: 'server_delete', 'id': server_id});\n",
+       "    return;\n",
+       "  } else if (comm !== null) {\n",
+       "    comm.send({event_type: 'delete', 'id': id});\n",
+       "  }\n",
+       "  delete PyViz.plot_index[id];\n",
+       "  if ((window.Bokeh !== undefined) & (id in window.Bokeh.index)) {\n",
+       "    var doc = window.Bokeh.index[id].model.document\n",
+       "    doc.clear();\n",
+       "    const i = window.Bokeh.documents.indexOf(doc);\n",
+       "    if (i > -1) {\n",
+       "      window.Bokeh.documents.splice(i, 1);\n",
+       "    }\n",
+       "  }\n",
+       "}\n",
+       "\n",
+       "/**\n",
+       " * Handle kernel restart event\n",
+       " */\n",
+       "function handle_kernel_cleanup(event, handle) {\n",
+       "  delete PyViz.comms[\"hv-extension-comm\"];\n",
+       "  window.PyViz.plot_index = {}\n",
+       "}\n",
+       "\n",
+       "/**\n",
+       " * Handle update_display_data messages\n",
+       " */\n",
+       "function handle_update_output(event, handle) {\n",
+       "  handle_clear_output(event, {cell: {output_area: handle.output_area}})\n",
+       "  handle_add_output(event, handle)\n",
+       "}\n",
+       "\n",
+       "function register_renderer(events, OutputArea) {\n",
+       "  function append_mime(data, metadata, element) {\n",
+       "    // create a DOM node to render to\n",
+       "    var toinsert = this.create_output_subarea(\n",
+       "    metadata,\n",
+       "    CLASS_NAME,\n",
+       "    EXEC_MIME_TYPE\n",
+       "    );\n",
+       "    this.keyboard_manager.register_events(toinsert);\n",
+       "    // Render to node\n",
+       "    var props = {data: data, metadata: metadata[EXEC_MIME_TYPE]};\n",
+       "    render(props, toinsert[0]);\n",
+       "    element.append(toinsert);\n",
+       "    return toinsert\n",
+       "  }\n",
+       "\n",
+       "  events.on('output_added.OutputArea', handle_add_output);\n",
+       "  events.on('output_updated.OutputArea', handle_update_output);\n",
+       "  events.on('clear_output.CodeCell', handle_clear_output);\n",
+       "  events.on('delete.Cell', handle_clear_output);\n",
+       "  events.on('kernel_ready.Kernel', handle_kernel_cleanup);\n",
+       "\n",
+       "  OutputArea.prototype.register_mime_type(EXEC_MIME_TYPE, append_mime, {\n",
+       "    safe: true,\n",
+       "    index: 0\n",
+       "  });\n",
+       "}\n",
+       "\n",
+       "if (window.Jupyter !== undefined) {\n",
+       "  try {\n",
+       "    var events = require('base/js/events');\n",
+       "    var OutputArea = require('notebook/js/outputarea').OutputArea;\n",
+       "    if (OutputArea.prototype.mime_types().indexOf(EXEC_MIME_TYPE) == -1) {\n",
+       "      register_renderer(events, OutputArea);\n",
+       "    }\n",
+       "  } catch(err) {\n",
+       "  }\n",
+       "}\n"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "import panel as pn\n",
+    "import param\n",
+    "from glob import glob\n",
+    "import base64\n",
+    "import re\n",
+    "import requests\n",
+    "from subprocess import Popen, PIPE, STDOUT\n",
+    "import html\n",
+    "from cairosvg import svg2png\n",
+    "import graphviz\n",
+    "from rdflib import Graph, Namespace, URIRef\n",
+    "import os\n",
+    "from collections import OrderedDict\n",
+    "from urllib.request import urlopen\n",
+    "import uuid\n",
+    "import shutil\n",
+    "\n",
+    "pn.extension(comms='ipywidgets')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "7b4685d7-698d-4a86-a0a4-a81d337bc9d7",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#######################################################################################################\n",
+    "# Parameters\n",
+    "#######################################################################################################\n",
+    "shaclBase = '/opt/dashboards/tools/shacl-1.3.2/bin/'\n",
+    "owl2vowlPath = '/opt/dashboards/tools/owl2vowl_0.3.7/owl2vowl.jar'\n",
+    "storeBase = '../store/CCTP-SRSA-IP-20210831/'\n",
+    "extractionGraph = '/opt/dashboards/tetras-lab-unl-demos/work_graph.ttl' # -- old --- extractionGraph = '/opt/dashboards/tetras-lab-unl-demos/extraction-data-9.ttl'\n",
+    "workDir = 'work-data/'\n",
+    "webvowlData = '/opt/webvowl'\n",
+    "pane_width = 1300"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "2c41c319-4beb-4a85-a232-61a12d00cdbf",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#######################################################################################################\n",
+    "# UNL tools functions\n",
+    "#######################################################################################################\n",
+    "\n",
+    "def unl2stuff(unlFilePath, jarPath, outPrefix, outType):\n",
+    "    # Run java parser\n",
+    "    cmd = ['java', '-jar', jarPath,\n",
+    "           '--input-file', unlFilePath,\n",
+    "           '--output-file', outPrefix,\n",
+    "           '--output-type', outType]\n",
+    "    with Popen(cmd, stdout=PIPE, stderr=STDOUT) as p:\n",
+    "        p.wait()\n",
+    "        p.stdout.flush()\n",
+    "        if p.returncode != 0:\n",
+    "            print(\"Error in unl2rdf: \\n\\n\"+p.stdout.read().decode())\n",
+    "            print('UNL;')\n",
+    "            #print(text)\n",
+    "            \n",
+    "def unl2dotWeb(unldata) :\n",
+    "    data={'unl': unldata, 'outputs':['dot', 'svg', 'rdf']}\n",
+    "    try:\n",
+    "        r = requests.post('https://unl.demo.tetras-libre.fr/unl2rdf', data=data)\n",
+    "    except Exception as e:\n",
+    "        return 'Error calling https://unl.demo.tetras-libre.fr/unl2rdf : \"{error}\"'.format(error=e)\n",
+    "    html=r.text\n",
+    "    # On utilise une regex au lieu de parser le html car ce dernier est mal formé\n",
+    "    regexSvg = re.compile('<svg.*svg>',re.MULTILINE|re.DOTALL)\n",
+    "    regexRdf = re.compile(\"<code id='rdf' class='collapse show'>(.*?)</code>\",re.MULTILINE|re.DOTALL)\n",
+    "    try : \n",
+    "        svg = regexSvg.search(html).group()\n",
+    "        rdf = regexRdf.search(html).group(1)        \n",
+    "    except Exception as e :\n",
+    "        svg = ''\n",
+    "        rdf = ''\n",
+    "        print(e)\n",
+    "    return(svg, rdf)\n",
+    "\n",
+    "def zipdir(path, ziph):\n",
+    "    # ziph is zipfile handle\n",
+    "    for root, dirs, files in os.walk(path):\n",
+    "        for file in files:\n",
+    "            if not('orig' in root):\n",
+    "                ziph.write(os.path.join(root, file), \n",
+    "                    os.path.relpath(os.path.join(root, file), \n",
+    "                    os.path.join(path, '..')))\n",
+    "\n",
+    "def addBaseUri(rdfStr):\n",
+    "    regexBaseUri = re.compile(\"http://rdf-unl.org.*?sentence.*?ontology\")\n",
+    "    baseUri = regexBaseUri.search(rdfStr).group()\n",
+    "    rdfStr = \"# baseURI: \"+baseUri+\"\\n\"+rdfStr\n",
+    "    return(rdfStr)\n",
+    "\n",
+    "def postEditRdf(rdfPath, frStr, enStr):\n",
+    "    textID = rdfPath.rsplit('/', 1)[0]\n",
+    "    newPrefix = \"http://unsel.rdf-unl.org/\"+textID\n",
+    "    with open(rdfPath,'r') as rdfFile :\n",
+    "        rdfStr = rdfFile.read()\n",
+    "        rdfFile.close()\n",
+    "    regexBaseUri = re.compile(\"http://rdf-unl.org.*?sentence.*?ontology\")\n",
+    "    rdfStr = rdfStr.replace('rdfs:label \"TBD : phrase en langue naturelle\"@inv ;', \n",
+    "                            '<https://unl.tetras-libre.fr/rdf/schema#has_id> \"{}\" ;\\n'.format(textID.split('/')[-2])+'rdfs:label \"\"\"{}\"\"\"@fr ;\\n'.format(frStr)+'    rdfs:label \"\"\"{}\"\"\"@en ;\\n'.format(enStr))\n",
+    "    baseUri = regexBaseUri.search(rdfStr).group()\n",
+    "    oldPrefix = baseUri.rsplit('/', 1)[0]\n",
+    "    rdfStr = rdfStr.replace(oldPrefix+'#ontology', newPrefix.rsplit('/', 1)[0]+'#ontology')    \n",
+    "    rdfStr = rdfStr.replace(oldPrefix+'#', \"http://unsel.rdf-unl.org/uw_lexeme#\")\n",
+    "    rdfStr = \"# baseURI: \"+baseUri+\"\\n @prefix :     <\"+baseUri.replace(\"ontology\",\"\")+\"> .\\n\"+rdfStr\n",
+    "    rdfStr = rdfStr.replace(oldPrefix, newPrefix)\n",
+    "    with open(rdfPath,'w') as rdfFile :\n",
+    "        rdfStr = rdfFile.write(rdfStr)\n",
+    "        rdfFile.close()\n",
+    "\n",
+    "def replaceInplace(filePath, searchText, replaceText):\n",
+    "    #read input file\n",
+    "    fin = open(filePath, \"rt\")\n",
+    "    #read file contents to string\n",
+    "    data = fin.read()\n",
+    "    #replace all occurrences of the required string\n",
+    "    data = data.replace(searchText, replaceText)\n",
+    "    #close the input file\n",
+    "    fin.close()\n",
+    "    #open the input file in write mode\n",
+    "    fin = open(filePath, \"wt\")\n",
+    "    #overrite the input file with the resulting data\n",
+    "    fin.write(data)\n",
+    "    #close the file\n",
+    "    fin.close()\n",
+    "    \n",
+    "def createStoreDirectory(unlStr, srsaRef):\n",
+    "    storeDir = storeBase+srsaRef+\"/current/\"\n",
+    "    regexFr = re.compile(\"{org:fr}\\n(.*?)\\n{/org}\",re.MULTILINE|re.DOTALL)\n",
+    "    try:\n",
+    "        frStr = regexFr.search(unlStr).group(1)\n",
+    "    except AttributeError:\n",
+    "        frStr = ''    \n",
+    "    enStr = ''\n",
+    "    # Create a directory named after 'Référence'\n",
+    "    try :\n",
+    "        os.makedirs(storeDir)\n",
+    "    except FileExistsError:\n",
+    "        pass\n",
+    "    # Add english translation to unl code\n",
+    "    unlStr = unlStr.replace(\"{/org}\", \"{{/org}}\\n{{en}}\\n{enStr}\\n{{/en}}\".format(enStr=enStr))  \n",
+    "    # Write UNL code to a file\n",
+    "    with open(storeDir+srsaRef+'.unl','w') as unlFile:\n",
+    "        unlFile.write(unlStr)\n",
+    "    os.chmod(storeDir+srsaRef+'.unl',0o766)\n",
+    "    # Send UNL code to https://unl.demo.tetras-libre.fr/unl2rdf to get SVG and RDF\n",
+    "    #svg, rdf = unl2dotWeb(unlStr)\n",
+    "    \n",
+    "    # Use unltools jar to create ttl and dot file from unl    \n",
+    "    unl2stuff(storeDir+srsaRef+'.unl', '/opt/dashboards/tools/unl2rdf-app-0.9.jar', storeDir+srsaRef, 'rdf')\n",
+    "    postEditRdf(storeDir+srsaRef+'.ttl', frStr, enStr)\n",
+    "    unl2stuff(storeDir+srsaRef+'.unl', '/opt/dashboards/tools/unl2rdf-app-0.9.jar', storeDir+srsaRef, 'dot')\n",
+    "    \n",
+    "    # Generate svg and png\n",
+    "    graphviz.render('dot', 'svg', storeDir+srsaRef+'.dot') \n",
+    "    graphviz.render('dot', 'png', storeDir+srsaRef+'.dot')\n",
+    "    # Rename generated svg and png so they are not named like file.dot.svg\n",
+    "    svgList = glob(storeDir+srsaRef+\"*.svg\")\n",
+    "    for svgPath in svgList:\n",
+    "        svgNewPath = svgPath.replace(\".dot\",\"\")\n",
+    "        os.rename(svgPath, svgNewPath)\n",
+    "    pngList = glob(storeDir+srsaRef+\"*.png\")\n",
+    "    for pngPath in pngList:\n",
+    "        pngNewPath = pngPath.replace(\".dot\",\"\")\n",
+    "        os.rename(pngPath, pngNewPath)\n",
+    "    \n",
+    "    # Add full text sentences to the svg\n",
+    "    replaceInplace(storeDir+srsaRef+'.svg', '</svg>','''<text x=\"0\" y=\"-40\">fr : {fr}</text>\n",
+    "<text x=\"0\" y=\"-20\">en : {en}</text>\n",
+    "</svg>'''.format(fr=frStr,  en=enStr))\n",
+    "    \n",
+    "    #svgWithTxt = svg.replace('</svg>','''<text x=\"0\" y=\"-40\">fr : {fr}</text>\n",
+    "#<text x=\"0\" y=\"-20\">en : {en}</text>\n",
+    "#</svg>'''.format(fr=frStr,  en=enStr))  \n",
+    "    \n",
+    "    #with open(storeDir+srsaRef+'.svg','w') as svgFile:\n",
+    "    #    svgFile.write(svgWithTxt)\n",
+    "    #os.chmod(storeDir+srsaRef+'.svg',0o766)\n",
+    "    #with open(storeDir+srsaRef+'.ttl','w') as rdfFile:\n",
+    "    #    rdfFile.write(rdf)\n",
+    "    os.chmod(storeDir+srsaRef+'.ttl',0o766)\n",
+    "    os.chmod(storeDir+srsaRef+'.svg',0o766)\n",
+    "    os.chmod(storeDir+srsaRef+'.png',0o766)\n",
+    "    os.chmod(storeDir+srsaRef+'.dot',0o766)\n",
+    "    os.chmod(storeDir+srsaRef+'.unl',0o766)\n",
+    "\n",
+    "    # Convert svg to png and write to a file\n",
+    "    #try:\n",
+    "    #    svg2png(bytestring=svgWithTxt, write_to=storeDir+srsaRef+'.png')\n",
+    "    #except :\n",
+    "    #    pass\n",
+    "    shutil.copytree(storeDir, storeBase+srsaRef+\"/orig/\") \n",
+    "    with open(storeBase+srsaRef+\"/current/\"+srsaRef+'.comments','w') as commentFile:\n",
+    "        commentFile.write(\"[David] : Saisissez vos commentaires en commençant par votre nom, n'oubliez pas d'enregistrer : \")\n",
+    "    os.chmod(storeBase+srsaRef+\"/current/\"+srsaRef+'.comments',0o766)\n",
+    "\n",
+    "def writeUnlFiles(unlStr, storePrefix):\n",
+    "    srsaRef = selectDir.value\n",
+    "    with open(storePrefix+'.unl','w') as unlFile:\n",
+    "        unlFile.write(unlStr)\n",
+    "        unlFile.close()   \n",
+    "        \n",
+    "def createFolderFromUnselInter(srsaRef):\n",
+    "    url = 'https://lingwarium.org/UNseL-inter/GrapheUNL.txt'\n",
+    "    unlStr = urlopen(url).read().decode('utf-8').replace('[P:1]','').replace('[/P]','')\n",
+    "    createStoreDirectory(unlStr, srsaRef)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "id": "7b32d69a-52fb-4b9d-8cd9-5fb45c177284",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#######################################################################################################\n",
+    "# Extraction\n",
+    "#######################################################################################################\n",
+    "\n",
+    "def run_command(cmd):\n",
+    "    with Popen(cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True) as p:\n",
+    "        p.poll()\n",
+    "        p.stdout.flush()\n",
+    "        p.stderr.flush()\n",
+    "        stdout, stderr = p.communicate()\n",
+    "    return p.returncode, stdout, stderr\n",
+    "\n",
+    "# Run SHACL infer sh script. mode argument can take the values 'infer' or 'validate'\n",
+    "def shaclInfer(ttlPath, mode, ttlRulesPath = ''):\n",
+    "    if ttlRulesPath == '':\n",
+    "        cmd = ['sh', shaclBase+'/shacl'+mode+'.sh', '-datafile', ttlPath]\n",
+    "    else:\n",
+    "        cmd = ['sh', shaclBase+'/shacl'+mode+'.sh', '-datafile', ttlPath, '-shapesfile', ttlRulesPath]\n",
+    "    #cmd = ' '.join(cmd)    \n",
+    "    #!{cmd}\n",
+    "    code, out, err = run_command(cmd)\n",
+    "    if code != 0:\n",
+    "         print(\"Error in SHACL script: \\n\\n\"+err)\n",
+    "    else:\n",
+    "        return(out)\n",
+    "    \n",
+    "def export_result(g):\n",
+    "    export_file = 'output.ttl'\n",
+    "    g.serialize(destination=export_file, base=base_uri, format='turtle')\n",
+    "    \n",
+    "#shaclInfer('/opt/dashboards/tetras-lab-unl-demos/demo-cctp-40.ttl', 'infer')\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "id": "5c7164d7-c074-4aa3-9776-0cc5cc8f18f7",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#==============================================================================\n",
+    "# TENET: prepare work data\n",
+    "#------------------------------------------------------------------------------\n",
+    "# Prepare work data for extraction processing. \n",
+    "#==============================================================================\n",
+    "\n",
+    "#==============================================================================\n",
+    "# Parameters\n",
+    "#==============================================================================\n",
+    "\n",
+    "# Working directories\n",
+    "CONFIG_DIR = \"/opt/dashboards/tools/tenet/config/\"\n",
+    "FRAME_DIR = \"/opt/dashboards/tools/tenet/frame/\"\n",
+    "CORPUS_DIR = storeBase\n",
+    "OUTPUT_DIR = \"output/\"\n",
+    "\n",
+    "# Config Definition\n",
+    "TURTLE_SUFFIX = \".ttl\"\n",
+    "frame_file = \"system-ontology.ttl\"\n",
+    "dash_file = \"dash-data-shapes.ttl\" # data from \"http://datashapes.org/dash.ttl\"\n",
+    "schema_file = \"unl-rdf-schema.ttl\"\n",
+    "semantic_net_file = \"semantic-net.ttl\"\n",
+    "cts_file = \"transduction-schemes.ttl\"\n",
+    "c_param_file = \"config-parameters.ttl\"\n",
+    "\n",
+    "# Dev Tests\n",
+    "base_uri = \"https://unsel.tetras-libre.fr/tenet/working\"\n",
+    "\n",
+    "#==============================================================================\n",
+    "# Graph Initialization\n",
+    "#==============================================================================\n",
+    "    \n",
+    "def load_config(work_graph):    \n",
+    "    file_ref = CONFIG_DIR + schema_file\n",
+    "    work_graph.parse(file_ref)\n",
+    "    \n",
+    "    file_ref = CONFIG_DIR + semantic_net_file\n",
+    "    work_graph.parse(file_ref)\n",
+    "    \n",
+    "    file_ref = CONFIG_DIR + dash_file\n",
+    "    work_graph.parse(file_ref)\n",
+    "    \n",
+    "    file_ref = CONFIG_DIR + c_param_file\n",
+    "    work_graph.parse(file_ref)\n",
+    "\n",
+    "def load_frame(work_graph):\n",
+    "    file_ref = FRAME_DIR + frame_file\n",
+    "    work_graph.parse(file_ref)\n",
+    "\n",
+    "#def define_namespace(work_graph):\n",
+    "#    print(\"-- Namespace Definition:\")\n",
+    "#    \n",
+    "#    sys_uri = \"https://unsel.tetras-libre.fr/tenet/frame/system-ontology/\"\n",
+    "#    concept_classes = [\"agent\"]\n",
+    "#    for concept in concept_classes:\n",
+    "#        new_prefix = \"sys-\" + concept\n",
+    "#        new_uri = URIRef(sys_uri + concept + '#') \n",
+    "#        work_graph.namespace_manager.bind(new_prefix, new_uri)\n",
+    "#        print(\"----- \" + new_prefix + \": \" + new_uri)\n",
+    "#    print(list(work_graph.namespace_manager.namespaces()))  \n",
+    "    \n",
+    "def load_sentences(work_graph, corpus):\n",
+    "    target_ref = CORPUS_DIR + corpus + '/current/*.ttl'\n",
+    "    for file_ref in glob(target_ref):\n",
+    "        if 'factoid' not in file_ref :\n",
+    "            work_graph.parse(file_ref)\n",
+    "                      \n",
+    "def load_cts(work_graph):    \n",
+    "    file_ref = CONFIG_DIR + cts_file\n",
+    "    work_graph.parse(file_ref)\n",
+    "        \n",
+    "#==============================================================================\n",
+    "# Result (export)\n",
+    "#==============================================================================\n",
+    "     \n",
+    "def export_result(work_graph, export_ref, export_file):    \n",
+    "    work_graph.serialize(destination=export_file, \n",
+    "                         base=base_uri + '/' + export_ref, \n",
+    "                         format='turtle')\n",
+    "    \n",
+    "    \n",
+    "def finalize_export_file(export_file):\n",
+    "    \"\"\" finalize the export file by adding some useful prefixes \"\"\"\n",
+    "    \n",
+    "    with open(export_file, \"rt\") as file:\n",
+    "        x = file.read()\n",
+    "    \n",
+    "    with open(export_file, \"wt\") as file:\n",
+    "        x = x.replace(\n",
+    "            \"@prefix sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/> .\",\n",
+    "            \"\"\"\n",
+    "            @prefix sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/> .\n",
+    "            @prefix sys-class: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/class/> .\n",
+    "            @prefix sys-property: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/property/> .\n",
+    "            @prefix sys-relation: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/relation/> .\n",
+    "            @prefix sys-Event: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/Event#> .\n",
+    "            @prefix sys-event: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/eventObjectProperty#> .\n",
+    "            @prefix sys-State_Property: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/State_Property#> .\n",
+    "            @prefix sys-stateProperty: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/statePropertyObjectProperty#> .\n",
+    "            @prefix sys-abstract_thing: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/abstract_thing#> .\n",
+    "            @prefix sys-action_verb: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/action_verb#> .\n",
+    "            @prefix sys-agent: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/agent#> .\n",
+    "            @prefix sys-attributive_verb: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/attributive_verb#> .\n",
+    "            @prefix sys-component: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/component#> .\n",
+    "            @prefix sys-message: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/message#> .\n",
+    "            @prefix sys-place: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/place#> .\n",
+    "            \"\"\")\n",
+    "        file.write(x)\n",
+    "        \n",
+    "#==============================================================================\n",
+    "# Main Function\n",
+    "#==============================================================================\n",
+    "  \n",
+    "def createTenetGraph(corpus):\n",
+    "    try:    \n",
+    "        work_graph = Graph()\n",
+    "        load_config(work_graph)   \n",
+    "        load_frame(work_graph) \n",
+    "        #define_namespace(work_graph)\n",
+    "        load_cts(work_graph)\n",
+    "        load_sentences(work_graph, corpus)\n",
+    "        output_file = extractionGraph\n",
+    "        export_result(work_graph, corpus, output_file)\n",
+    "        finalize_export_file(output_file)\n",
+    "        return(work_graph)\n",
+    "    except Exception as e :\n",
+    "        print(\"!!! An exception occurred importing rdf graphs for extraction !!!\\n\"+str(e)) \n",
+    "        \n",
+    "def addSentenceInTenetGraph(work_graph, sentence_ref):\n",
+    "    \"\"\" TODO: add a sentence to work_graph \"\"\"\n",
+    "    pass\n",
+    "        \n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "id": "f66bfcd2-f2b9-4603-b1f2-d4fb643c8c3c",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "clearExecutionInstances = \"\"\"\n",
+    "    PREFIX cts: <https://unsel.tetras-libre.fr/tenet/transduction-schemes#>\n",
+    "    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
+    "    PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n",
+    "\n",
+    "    DELETE {?x rdf:type ?c}\n",
+    "    WHERE {\n",
+    "        ?c rdfs:subClassOf* cts:Transduction_Schemes .\n",
+    "        ?x rdf:type ?c .\n",
+    "    }\n",
+    "\"\"\"\n",
+    "\n",
+    "addExecutionInstance = \"\"\"\n",
+    "    PREFIX cts: <https://unsel.tetras-libre.fr/tenet/transduction-schemes#>\n",
+    "    PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
+    "    PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n",
+    "\n",
+    "    INSERT DATA {{<exec_instance> rdf:type {}}}\n",
+    "\"\"\"\n",
+    "\n",
+    "\n",
+    "\n",
+    "def owl2vowl(ttlFilePath, importList=[]):\n",
+    "    # Run java parser\n",
+    "    if importList == []:\n",
+    "        cmd = ['java', '-jar', owl2vowlPath,\n",
+    "           '-file', ttlFilePath]  \n",
+    "    else:\n",
+    "        cmd = ['java', '-jar', owl2vowlPath,\n",
+    "           '-file', ttlFilePath,\n",
+    "           '-dependencies'] + importList \n",
+    "    with Popen(cmd, stdout=PIPE, stderr=STDOUT) as p:\n",
+    "        p.wait()\n",
+    "        p.stdout.flush()\n",
+    "        if p.returncode != 0:\n",
+    "            print(\"Error in owl2vowl: \\n\\n\"+p.stdout.read().decode())\n",
+    "    outFileName = ttlFilePath.split('/')[-1].replace('ttl','json')\n",
+    "    os.rename(outFileName, '/opt/webvowl/'+outFileName)\n",
+    "    \n",
+    "def applyInferStep(uuidStr, graph, step):\n",
+    "    step_ref = \"cts:\" + step\n",
+    "    dest_file = workDir + uuidStr + '-' + step + \".ttl\"\n",
+    "    base_ref = \"http://\" + uuidStr + '/' + step\n",
+    "    graph.update(clearExecutionInstances)\n",
+    "    graph.update(addExecutionInstance.format(step_ref)) # ex. : step = 'cts:generation'\n",
+    "    graph.serialize(destination=dest_file, base=base_ref, format='turtle') # serialize graph before inference\n",
+    "    work_file = dest_file\n",
+    "    inferResult = shaclInfer(work_file, 'infer') # apply SHACL inference\n",
+    "    graph.parse(data=inferResult) # update graph with inference\n",
+    "    graph.serialize(destination=dest_file, base=base_ref, format='turtle') # serialize graph after inference\n",
+    "    return graph, inferResult\n",
+    "    \n",
+    "    \n",
+    "#owl2vowl('/opt/dashboards/printtatic/extraction.ttl', importList=['system-ontology.ttl'])\n",
+    "def createOnto(uuidStr):\n",
+    "    \n",
+    "    # -- Initialization (creation of extraction graph)\n",
+    "    step = 'init_graph'\n",
+    "    # -- old --- step_ref = \"cts:\" + step\n",
+    "    dest_file = workDir + uuidStr + '-' + step + \".ttl\"\n",
+    "    base_ref = \"http://\" + uuidStr + '/' + step\n",
+    "    graph = createTenetGraph(uuidStr)\n",
+    "    graph.serialize(destination=dest_file, base=base_ref, format='turtle')\n",
+    "    \n",
+    "    # -- Extraction\n",
+    "    graph, _ = applyInferStep(uuidStr, graph, 'preprocessing')\n",
+    "    graph, _ = applyInferStep(uuidStr, graph, 'net_extension')\n",
+    "    graph, finalInferResult = applyInferStep(uuidStr, graph, 'generation_dga_patch')\n",
+    "    # -- old --- graph, finalInferResult = applyInferStep(uuidStr, graph, 'generation')\n",
+    "    \n",
+    "    # -- dev --- Generation step by step\n",
+    "    # -- old --- graph, _ = applyInferStep(uuidStr, graph, 'class_generation')\n",
+    "    # -- old --- graph, _ = applyInferStep(uuidStr, graph, 'relation_generation_1')\n",
+    "    # -- old --- graph, _ = applyInferStep(uuidStr, graph, 'relation_generation_2')\n",
+    "    # -- old --- graph, _ = applyInferStep(uuidStr, graph, 'relation_generation_3_1')\n",
+    "    # -- old --- graph, finalInferResult = applyInferStep(uuidStr, graph, 'relation_generation_3_2')\n",
+    "    # -- old --- graph, finalInferResult = applyInferStep(uuidStr, graph, 'relation_generation')\n",
+    "    # -- dev ---\n",
+    "    \n",
+    "    # -- Preprocessing\n",
+    "    # -- old --- step = 'preprocessing'\n",
+    "    # -- old --- step_ref = \"cts:\" + step\n",
+    "    # -- old --- dest_file = step + \".ttl\"\n",
+    "    # -- old --- base_ref = \"http://\" + step\n",
+    "    # -- old --- graph.update(clearExecutionInstances)\n",
+    "    # -- old --- graph.update(addExecutionInstance.format(step_ref)) # ex. : step = 'cts:generation'\n",
+    "    # -- old --- graph.serialize(destination=dest_file, base=base_ref, format='turtle')\n",
+    "    # -- old --- work_file = dest_file\n",
+    "    # -- old --- inferResult1 = shaclInfer(work_file, 'infer')\n",
+    "    # -- old --- graph.parse(data=inferResult1)\n",
+    "    # -- old --- graph.serialize(destination=dest_file, base=base_ref, format='turtle')\n",
+    "     \n",
+    "    # -- Net Extension    \n",
+    "    # -- old --- step = 'net_extension'\n",
+    "    # -- old --- step_ref = \"cts:\" + step\n",
+    "    # -- old --- dest_file = step + \".ttl\"\n",
+    "    # -- old --- base_ref = \"http://\" + step\n",
+    "    # -- old --- graph.update(clearExecutionInstances)\n",
+    "    # -- old --- graph.update(addExecutionInstance.format(step_ref)) # ex. : step = 'cts:generation'\n",
+    "    # -- old --- graph.serialize(destination=dest_file, base=base_ref, format='turtle')\n",
+    "    # -- old --- work_file = dest_file\n",
+    "    # -- old --- inferResult2 = shaclInfer(work_file, 'infer')\n",
+    "    # -- old --- graph.parse(data=inferResult2)\n",
+    "    # -- old --- graph.serialize(destination=dest_file, base=base_ref, format='turtle')\n",
+    "    \n",
+    "    # -- Generation\n",
+    "    # -- old --- step = 'generation'\n",
+    "    # -- old --- step_ref = \"cts:\" + step\n",
+    "    # -- old --- dest_file = step + \".ttl\"\n",
+    "    # -- old --- base_ref = \"http://\" + step\n",
+    "    # -- old --- graph.update(clearExecutionInstances)\n",
+    "    # -- old --- graph.update(addExecutionInstance.format(step_ref)) # ex. : step = 'cts:generation'\n",
+    "    # -- old --- graph.serialize(destination=dest_file, base=base_ref, format='turtle') # serialize graph before inference\n",
+    "    # -- old --- work_file = dest_file\n",
+    "    # -- old --- finalInferResult = shaclInfer(work_file, 'infer') # apply SHACL inference\n",
+    "    # -- old --- graph.parse(data=finalInferResult) # update graph with inference\n",
+    "    # -- old --- graph.serialize(destination=dest_file, base=base_ref, format='turtle') # serialize graph after inference\n",
+    "        \n",
+    "    # -- Result\n",
+    "    # -- old --- file_ref = CONFIG_DIR + schema_file\n",
+    "    # -- old --- sys_frame_onto = FRAME_DIR + frame_file\n",
+    "    factoidPath = storeBase+uuidStr+'/current/'+uuidStr+'_factoid.ttl'\n",
+    "    with open(factoidPath, 'w') as outfile:\n",
+    "         outfile.write(finalInferResult)\n",
+    "         outfile.close()\n",
+    "    \n",
+    "    owl2vowl(factoidPath)#, importList=[sys_frame_onto])\n",
+    "    \n",
+    "    #result_graph.parse(data=finalInferResult) # update with inferences resulting of extraction final step\n",
+    "    #factoidPath = storeBase+uuidStr+'/current/'+uuidStr+'_factoid.ttl'\n",
+    "    #result_graph.serialize(destination=factoidPath, base=base_ref, format='turtle') \n",
+    "    \n",
+    "    # On exécute ensuite seulement les règles de génération pour récupérer seulement le nécesaire à la visualisation\n",
+    "    # -- old --- graph.update(clearExecutionInstances)\n",
+    "    # -- old --- graph.update(addExecutionInstance.format('cts:generation'))\n",
+    "    # -- old --- graph.serialize(destination=\"tmp.ttl\", base=\"http://tmp\", format='turtle')\n",
+    "    \n",
+    "    # -- old --- factoidPath = storeBase+uuidStr+'/current/'+uuidStr+'_factoid.ttl'\n",
+    "    # -- old --- with open(factoidPath, 'w') as outfile:\n",
+    "    # -- old ---     inferResult3 = shaclInfer('tmp.ttl', 'infer')\n",
+    "    # -- old ---     outfile.write(inferResult3)\n",
+    "    # -- old ---     outfile.close()\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "id": "40b54849-9333-4819-b953-6e816ffe474c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#######################################################################################################\n",
+    "# Validation\n",
+    "#######################################################################################################\n",
+    "def pyshaclValidate():\n",
+    "    from pyshacl import validate\n",
+    "    data_file = open('tmp.ttl').read()\n",
+    "    shapes_file = open('test-shacl-construct.shapes-order.ttl').read()\n",
+    "    conforms, v_graph, v_text = validate(data_file, shacl_graph=shapes_file)\n",
+    "    print(conforms)\n",
+    "    print(v_graph)\n",
+    "    print(v_text)\n",
+    "    \n",
+    "def loadFactoids(directory):\n",
+    "    ttlList = glob(directory+\"/*/current/*.ttl\")\n",
+    "    g = Graph()\n",
+    "    for ttl in ttlList :\n",
+    "        g.parse(ttl)\n",
+    "    g.parse('/opt/dashboards/tools/tenet/frame/system-ontology.ttl')\n",
+    "    g.parse('/opt/dashboards/tools/tenet/config/unl-rdf-schema.ttl')\n",
+    "    return(g)\n",
+    "    \n",
+    "possibleUnderspecificationQuery ='''\n",
+    "PREFIX owl: <http://www.w3.org/2002/07/owl#>\n",
+    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n",
+    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
+    "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n",
+    "PREFIX unl: <https://unl.tetras-libre.fr/rdf/schema#>\n",
+    "PREFIX net: <https://unsel.tetras-libre.fr/tenet/semantic-net#>\n",
+    "PREFIX cprm: <https://unsel.tetras-libre.fr/tenet/config/parameters#>\n",
+    "PREFIX req: <https://unsel.tetras-libre.fr/tenet/frame/requirement-ontology#>\n",
+    "PREFIX sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/>\n",
+    "PREFIX fprm: <https://unsel.tetras-libre.fr/tenet/frame/parameters#>\n",
+    "\n",
+    "SELECT ?thisId (CONCAT('Parle-t-on bien de tous les \"', ?entityLabel, '\" possibles ? (', ?subEntities, ')' ) AS ?message)\n",
+    "WHERE {\n",
+    "FILTER(?count>1)\n",
+    "{SELECT ?this ?thisId ?entityLabel (GROUP_CONCAT(?subEntityLabel;SEPARATOR=\", \") AS ?subEntities) (COUNT(?subEntityLabel) AS ?count)\n",
+    "WHERE {\n",
+    "\t?subEntity rdfs:subClassOf ?entity ; rdfs:label ?subEntityLabel .\n",
+    "\t{SELECT ?this ?entity ?thisId ?entityLabel\n",
+    "\tWHERE {\n",
+    "        BIND(\"##ID##\" AS ?thisId)\n",
+    "    \t?this a unl:UNL_Sentence ; unl:has_id ?thisId .\n",
+    "\t\t?entity sys:from_structure ?this ; \n",
+    "\t\t\trdfs:subClassOf+ sys:Structure ;\n",
+    "\t\t\trdfs:label ?entityLabel .\n",
+    "\t\t FILTER (\n",
+    "     \t\t!EXISTS {?subEntity1 rdfs:subClassOf ?entity; sys:from_structure ?this}\n",
+    "     )\n",
+    "\t}} \n",
+    "}\n",
+    "GROUP BY ?this ?thisId ?entityLabel }\n",
+    "}\n",
+    "'''\n",
+    "\n",
+    "possibleClassEquivalenceQuery = '''PREFIX owl: <http://www.w3.org/2002/07/owl#>\n",
+    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n",
+    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
+    "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n",
+    "PREFIX unl: <https://unl.tetras-libre.fr/rdf/schema#>\n",
+    "PREFIX net: <https://unsel.tetras-libre.fr/tenet/semantic-net#>\n",
+    "PREFIX cprm: <https://unsel.tetras-libre.fr/tenet/config/parameters#>\n",
+    "PREFIX req: <https://unsel.tetras-libre.fr/tenet/frame/requirement-ontology#>\n",
+    "PREFIX sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/>\n",
+    "PREFIX fprm: <https://unsel.tetras-libre.fr/tenet/frame/parameters#>\n",
+    "\n",
+    "SELECT (CONCAT(?messageTMP, ?sentenceList, ')') AS ?message) ?sentenceList\n",
+    "WHERE {\n",
+    "SELECT   ?messageTMP (GROUP_CONCAT(?sentenceId; SEPARATOR=\", \")  AS ?sentenceList)\n",
+    "WHERE {\t\n",
+    "\tSELECT DISTINCT ?messageTMP ?sentenceId\n",
+    "\tWHERE {\n",
+    "\t\tFILTER (?count = 1)\n",
+    "\t\t?subEntity rdfs:subClassOf ?this ; rdfs:label ?subEntityLabel ; sys:from_structure ?subEntitySentence  .\n",
+    "\t\t?this rdfs:label ?thisLabel ; sys:from_structure ?thisSentence .\n",
+    "    \tBIND(CONCAT('\"', ?subEntityLabel, '\" est la seule sous classe de \"', ?thisLabel, '\". Ces classes sont-elles équivalentes ? <br/>(voir les exigences ') AS ?messageTMP)\n",
+    "    \t\t{BIND(\"##ID##\" AS ?thisSentenceId)\n",
+    "            ?thisSentence unl:has_id ?thisSentenceId .\n",
+    "\t\t\tBIND (?thisSentenceId AS ?sentenceId)} \n",
+    "\t\tUNION \n",
+    "\t\t\t{?subEntitySentence unl:has_id ?subEntitySentenceId .\n",
+    "\t\t\tBIND (?subEntitySentenceId AS ?sentenceId)}\n",
+    "\tFILTER(NOT EXISTS {?subEntity sys:from_structure ?thisSentence})\n",
+    "\t\t{SELECT ?this (COUNT(?subClass) AS ?count)\n",
+    "\t\tWHERE {\n",
+    "    \t    \t?this rdfs:subClassOf+ sys:Structure .\n",
+    "\t\t\t\t?subClass rdfs:subClassOf ?this\n",
+    "\t\t} GROUP BY ?this } \n",
+    "\t} ORDER BY ?sentenceId\n",
+    "} GROUP BY ?messageTMP\n",
+    "}'''\n",
+    "\n",
+    "unfeaturedDomainOrRangeQuery = '''PREFIX owl: <http://www.w3.org/2002/07/owl#>\n",
+    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n",
+    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
+    "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n",
+    "PREFIX unl: <https://unl.tetras-libre.fr/rdf/schema#>\n",
+    "PREFIX net: <https://unsel.tetras-libre.fr/tenet/semantic-net#>\n",
+    "PREFIX cprm: <https://unsel.tetras-libre.fr/tenet/config/parameters#>\n",
+    "PREFIX req: <https://unsel.tetras-libre.fr/tenet/frame/requirement-ontology#>\n",
+    "PREFIX sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/>\n",
+    "PREFIX fprm: <https://unsel.tetras-libre.fr/tenet/frame/parameters#>\n",
+    "\n",
+    "SELECT ?sentenceId (CONCAT(?messageTMP, GROUP_CONCAT(?featureLabel ; SEPARATOR=', ')) AS ?message)\n",
+    "WHERE {\n",
+    "    SELECT DISTINCT ?sentenceId ?featureLabel (CONCAT( 'Dans cette exigence, \"', ?unfeaturedLabel, '\" pourrait être précisé par : ') AS ?messageTMP)\n",
+    "    WHERE {\n",
+    "        {\n",
+    "        ?p rdfs:subPropertyOf+ sys:objectProperty ;\n",
+    "            rdfs:domain ?featured ; \n",
+    "            rdfs:range ?unfeatured .\n",
+    "        }\n",
+    "    UNION\n",
+    "        {\n",
+    "        ?p rdfs:subPropertyOf+ sys:objectProperty ;\n",
+    "            rdfs:domain ?unfeatured ; \n",
+    "            rdfs:range ?featured .\n",
+    "        }\n",
+    "    ?p rdfs:label ?pLabel .\n",
+    "    ?featured sys:has_feature ?feature .\n",
+    "    FILTER(NOT EXISTS {\n",
+    "        ?unfeatured sys:has_feature ?feature\n",
+    "    })\n",
+    "    ?featuredInstance a ?featured ; sys:from_structure ?sentence.\n",
+    "    ?unfeaturedInstance a ?unfeatured ; sys:from_structure ?sentence.\n",
+    "    BIND(\"##ID##\" AS ?sentenceId)\n",
+    "    ?sentence unl:has_id ?sentenceId .\n",
+    "    FILTER(NOT EXISTS {\n",
+    "        ?featuredInstance a ?featured2 .\n",
+    "        ?featured2 rdfs:subClassOf ?featured .\n",
+    "    })\n",
+    "    FILTER(NOT EXISTS {\n",
+    "        ?unfeaturedInstance a ?unfeatured2 .\n",
+    "        ?unfeatured2 rdfs:subClassOf ?unfeatured .\n",
+    "    })\n",
+    "    ?featured rdfs:label ?featuredLabel .\n",
+    "    ?unfeatured rdfs:label ?unfeaturedLabel .\n",
+    "    ?feature rdfs:label ?featureLabel .\n",
+    "}\n",
+    "    } GROUP BY ?sentenceId ?messageTMP\n",
+    "'''\n",
+    "\n",
+    "unfeaturedDomainOrRangeWithRefQuery ='''PREFIX owl: <http://www.w3.org/2002/07/owl#>\n",
+    "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n",
+    "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n",
+    "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n",
+    "PREFIX unl: <https://unl.tetras-libre.fr/rdf/schema#>\n",
+    "PREFIX net: <https://unsel.tetras-libre.fr/tenet/semantic-net#>\n",
+    "PREFIX cprm: <https://unsel.tetras-libre.fr/tenet/config/parameters#>\n",
+    "PREFIX req: <https://unsel.tetras-libre.fr/tenet/frame/requirement-ontology#>\n",
+    "PREFIX sys: <https://unsel.tetras-libre.fr/tenet/frame/system-ontology/>\n",
+    "PREFIX fprm: <https://unsel.tetras-libre.fr/tenet/frame/parameters#>\n",
+    "\n",
+    "SELECT ?sentenceId (CONCAT('\"', ?unfeaturedLabel, '\" pourrait être précisé par un ou plusieurs attributs parmis : ', ?featureList, '. <br/>(exemples de référence : ', GROUP_CONCAT(?sentence2id ; SEPARATOR=', '), ').') AS ?message)  \n",
+    "WHERE {\n",
+    "{SELECT DISTINCT ?sentenceId ?unfeaturedLabel  ?sentence2id (GROUP_CONCAT(?featureLabel ; SEPARATOR=', ') AS ?featureList) #\n",
+    "WHERE {\n",
+    "    SELECT DISTINCT ?sentenceId ?sentence2id ?unfeaturedLabel ?featureLabel ?otherwiseFeaturedLabel ?featured2label\n",
+    "    WHERE {\n",
+    "        {\n",
+    "        ?p rdfs:subPropertyOf+ sys:objectProperty ;\n",
+    "            rdfs:domain ?featured ; \n",
+    "            rdfs:range ?unfeatured .\n",
+    "        }\n",
+    "    UNION\n",
+    "        {\n",
+    "        ?p rdfs:subPropertyOf+ sys:objectProperty ;\n",
+    "            rdfs:domain ?unfeatured ; \n",
+    "            rdfs:range ?featured .\n",
+    "        }\n",
+    "    ?p rdfs:label ?pLabel .\n",
+    "    ?featured sys:has_feature ?feature .\n",
+    "    FILTER(NOT EXISTS {\n",
+    "        ?unfeatured sys:has_feature ?feature\n",
+    "    })\n",
+    "    ?featuredInstance a ?featured ; sys:from_structure ?sentence.\n",
+    "    ?unfeaturedInstance a ?unfeatured ; sys:from_structure ?sentence.\n",
+    "    BIND(\"##ID##\" AS ?sentenceId)\n",
+    "    ?sentence unl:has_id ?sentenceId .\n",
+    "\n",
+    "\t?otherwiseFeatured rdfs:subClassOf ?unfeatured ; sys:has_feature ?feature2 ; rdfs:label ?otherwiseFeaturedLabel.\n",
+    "    ?otherwiseFeaturedInstance a ?otherwiseFeatured  ; sys:from_structure ?sentence2.\t\n",
+    "\t?sentence2 unl:has_id ?sentence2id .\n",
+    "\t{?otherwiseFeaturedInstance ?p2 ?featuredInstance2} UNION { ?featuredInstance2 ?p2 ?otherwiseFeaturedInstance}\n",
+    "\t?featuredInstance2 a ?featured2 .\n",
+    "\t?featured2  sys:has_feature ?feature2 ; rdfs:label ?featured2label.\n",
+    "\t\t\n",
+    "    FILTER(NOT EXISTS {\n",
+    "        ?featuredInstance a ?featured2 .\n",
+    "        ?featured2 rdfs:subClassOf ?featured .\n",
+    "    })\n",
+    "    FILTER(NOT EXISTS {\n",
+    "        ?unfeaturedInstance a ?unfeatured2 .\n",
+    "        ?unfeatured2 rdfs:subClassOf ?unfeatured .\n",
+    "    })\n",
+    "    ?featured rdfs:label ?featuredLabel .\n",
+    "    ?unfeatured rdfs:label ?unfeaturedLabel .\n",
+    "    ?feature rdfs:label ?featureLabel .\n",
+    "    }\n",
+    "} GROUP BY ?sentenceId ?unfeaturedLabel  ?sentence2id}\n",
+    "} GROUP BY ?sentenceId ?unfeaturedLabel  ?featureList\n",
+    "'''\n",
+    "\n",
+    "queryTypeDic = {'error':[possibleClassEquivalenceQuery], \n",
+    "                'warning':[possibleUnderspecificationQuery], \n",
+    "                'info':[unfeaturedDomainOrRangeQuery]}\n",
+    "\n",
+    "# On charge les factoïdes pour vérification des nouvelles phrases\n",
+    "g = loadFactoids(storeBase)\n",
+    "#g.serialize(destination='/opt/dashboards/store/SRSA-IP_demo.ttl', format='turtle')\n",
+    "\n",
+    "errorReqDic = {}\n",
+    "warningReqDic = {}\n",
+    "infoReqDic = {}\n",
+    "\n",
+    "\n",
+    "#possibleUnderspecification = g.query(possibleUnderspecificationQuery)\n",
+    "#possibleClassEquivalence = g.query(possibleClassEquivalenceQuery)\n",
+    "#unfeaturedDomainOrRange = g.query(unfeaturedDomainOrRangeQuery)\n",
+    "\n",
+    "\n",
+    "\n",
+    "#for r in possibleUnderspecification : \n",
+    "#    if str(r['thisId']) in warningReqDic:\n",
+    "#        warningReqDic[str(r['thisId'])] += [str(r['message'])]\n",
+    "#    else:\n",
+    "#        warningReqDic[str(r['thisId'])] = [str(r['message'])]    \n",
+    "\n",
+    "#for r in possibleClassEquivalence : \n",
+    "#    sentenceList = str(r['sentenceList']).split(', ')\n",
+    "#    for sentence in sentenceList:\n",
+    "#        if sentence in errorReqDic:\n",
+    "#            errorReqDic[sentence] += [str(r['message'])]\n",
+    "#        else:\n",
+    "#            errorReqDic[sentence] = [str(r['message'])]   \n",
+    "\n",
+    "#for r in unfeaturedDomainOrRange : \n",
+    "#    if str(r['sentenceId']) in infoReqDic:\n",
+    "#        infoReqDic[str(r['sentenceId'])] += [str(r['message'])]\n",
+    "#    else:\n",
+    "#        infoReqDic[str(r['sentenceId'])] = [str(r['message'])]    \n",
+    "\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "id": "744abdb9-b3d6-4025-abc9-2f749644c3ed",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "/opt/dashboards/store/CCTP-SRSA-IP-20210831/SRSA-IP_STB_PHON_01100\n"
+     ]
+    },
+    {
+     "ename": "KeyboardInterrupt",
+     "evalue": "",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
+      "\u001b[0;32m/tmp/ipykernel_29142/776154484.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m     33\u001b[0m                 \u001b[0moutFile\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mclose\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     34\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 35\u001b[0;31m \u001b[0mupdateAllVerificationMessages\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     36\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     37\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/tmp/ipykernel_29142/776154484.py\u001b[0m in \u001b[0;36mupdateAllVerificationMessages\u001b[0;34m()\u001b[0m\n\u001b[1;32m     23\u001b[0m                 \u001b[0;32mfor\u001b[0m \u001b[0mqueryTMP\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mqueryTypeDic\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mmessageType\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     24\u001b[0m                     \u001b[0mquery\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mqueryTMP\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreplace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"##ID##\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mreqId\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 25\u001b[0;31m                     \u001b[0;32mfor\u001b[0m \u001b[0mr\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mg\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mquery\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mquery\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     26\u001b[0m                         \u001b[0mmessage\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mr\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'message'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     27\u001b[0m                         \u001b[0;31m#print(message)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/query.py\u001b[0m in \u001b[0;36m__iter__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    290\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    291\u001b[0m             \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_genbindings\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 292\u001b[0;31m                 \u001b[0;32mfor\u001b[0m \u001b[0mb\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_genbindings\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    293\u001b[0m                     \u001b[0;32mif\u001b[0m \u001b[0mb\u001b[0m\u001b[0;34m:\u001b[0m  \u001b[0;31m# don't add a result row in case of empty binding {}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    294\u001b[0m                         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_bindings\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mb\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36m<genexpr>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m    508\u001b[0m     \u001b[0mres\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mevalPart\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mproject\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mp\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    509\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 510\u001b[0;31m     \u001b[0;32mreturn\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mrow\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mproject\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mproject\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mPV\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mrow\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mres\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    511\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    512\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36mevalExtend\u001b[0;34m(ctx, extend)\u001b[0m\n\u001b[1;32m     91\u001b[0m     \u001b[0;31m# TODO: Deal with dict returned from evalPart from GROUP BY\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     92\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 93\u001b[0;31m     \u001b[0;32mfor\u001b[0m \u001b[0mc\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mevalPart\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mextend\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mp\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     94\u001b[0m         \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     95\u001b[0m             \u001b[0me\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_eval\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mextend\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexpr\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mc\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforget\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_except\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mextend\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_vars\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36mevalExtend\u001b[0;34m(ctx, extend)\u001b[0m\n\u001b[1;32m     91\u001b[0m     \u001b[0;31m# TODO: Deal with dict returned from evalPart from GROUP BY\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     92\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 93\u001b[0;31m     \u001b[0;32mfor\u001b[0m \u001b[0mc\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mevalPart\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mextend\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mp\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     94\u001b[0m         \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     95\u001b[0m             \u001b[0me\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_eval\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mextend\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexpr\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mc\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforget\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_except\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mextend\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_vars\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36mevalAggregateJoin\u001b[0;34m(ctx, agg)\u001b[0m\n\u001b[1;32m    419\u001b[0m             \u001b[0maggregator\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupdate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrow\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    420\u001b[0m     \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 421\u001b[0;31m         \u001b[0;32mfor\u001b[0m \u001b[0mrow\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mp\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    422\u001b[0m             \u001b[0;31m# determine right group aggregator for row\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    423\u001b[0m             \u001b[0mk\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtuple\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0m_eval\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrow\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mFalse\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0me\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mgroup_expr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36mevalDistinct\u001b[0;34m(ctx, part)\u001b[0m\n\u001b[1;32m    499\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    500\u001b[0m     \u001b[0mdone\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mset\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 501\u001b[0;31m     \u001b[0;32mfor\u001b[0m \u001b[0mx\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mres\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    502\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0mx\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mdone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    503\u001b[0m             \u001b[0;32myield\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36m<genexpr>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m    508\u001b[0m     \u001b[0mres\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mevalPart\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mproject\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mp\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    509\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 510\u001b[0;31m     \u001b[0;32mreturn\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mrow\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mproject\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mproject\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mPV\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mrow\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mres\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    511\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    512\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36mevalExtend\u001b[0;34m(ctx, extend)\u001b[0m\n\u001b[1;32m     91\u001b[0m     \u001b[0;31m# TODO: Deal with dict returned from evalPart from GROUP BY\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     92\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 93\u001b[0;31m     \u001b[0;32mfor\u001b[0m \u001b[0mc\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mevalPart\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mextend\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mp\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     94\u001b[0m         \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     95\u001b[0m             \u001b[0me\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_eval\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mextend\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexpr\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mc\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforget\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_except\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mextend\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_vars\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36mevalFilter\u001b[0;34m(ctx, part)\u001b[0m\n\u001b[1;32m    170\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mevalFilter\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpart\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    171\u001b[0m     \u001b[0;31m# TODO: Deal with dict returned from evalPart!\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 172\u001b[0;31m     \u001b[0;32mfor\u001b[0m \u001b[0mc\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mevalPart\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpart\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mp\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    173\u001b[0m         if _ebv(\n\u001b[1;32m    174\u001b[0m             \u001b[0mpart\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexpr\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36mevalPart\u001b[0;34m(ctx, part)\u001b[0m\n\u001b[1;32m    252\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0mevalFilter\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpart\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    253\u001b[0m     \u001b[0;32melif\u001b[0m \u001b[0mpart\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mname\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m\"Join\"\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 254\u001b[0;31m         \u001b[0;32mreturn\u001b[0m \u001b[0mevalJoin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpart\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    255\u001b[0m     \u001b[0;32melif\u001b[0m \u001b[0mpart\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mname\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m\"LeftJoin\"\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    256\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0mevalLeftJoin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpart\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36mevalJoin\u001b[0;34m(ctx, join)\u001b[0m\n\u001b[1;32m    125\u001b[0m     \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    126\u001b[0m         \u001b[0ma\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mevalPart\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mjoin\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mp1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 127\u001b[0;31m         \u001b[0mb\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mset\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mevalPart\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mjoin\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mp2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    128\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0m_join\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mb\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    129\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36mevalBGP\u001b[0;34m(ctx, bgp)\u001b[0m\n\u001b[1;32m     84\u001b[0m             \u001b[0;32mcontinue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     85\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 86\u001b[0;31m         \u001b[0;32mfor\u001b[0m \u001b[0mx\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mevalBGP\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbgp\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     87\u001b[0m             \u001b[0;32myield\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     88\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36mevalBGP\u001b[0;34m(ctx, bgp)\u001b[0m\n\u001b[1;32m     84\u001b[0m             \u001b[0;32mcontinue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     85\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 86\u001b[0;31m         \u001b[0;32mfor\u001b[0m \u001b[0mx\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mevalBGP\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbgp\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     87\u001b[0m             \u001b[0;32myield\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     88\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36mevalBGP\u001b[0;34m(ctx, bgp)\u001b[0m\n\u001b[1;32m     84\u001b[0m             \u001b[0;32mcontinue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     85\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 86\u001b[0;31m         \u001b[0;32mfor\u001b[0m \u001b[0mx\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mevalBGP\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbgp\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     87\u001b[0m             \u001b[0;32myield\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     88\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36mevalBGP\u001b[0;34m(ctx, bgp)\u001b[0m\n\u001b[1;32m     84\u001b[0m             \u001b[0;32mcontinue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     85\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 86\u001b[0;31m         \u001b[0;32mfor\u001b[0m \u001b[0mx\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mevalBGP\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbgp\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     87\u001b[0m             \u001b[0;32myield\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     88\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/evaluate.py\u001b[0m in \u001b[0;36mevalBGP\u001b[0;34m(ctx, bgp)\u001b[0m\n\u001b[1;32m     54\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     55\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mbgp\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 56\u001b[0;31m         \u001b[0;32myield\u001b[0m \u001b[0mctx\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msolution\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     57\u001b[0m         \u001b[0;32mreturn\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     58\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/sparql.py\u001b[0m in \u001b[0;36msolution\u001b[0;34m(self, vars)\u001b[0m\n\u001b[1;32m    333\u001b[0m             )\n\u001b[1;32m    334\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 335\u001b[0;31m             \u001b[0;32mreturn\u001b[0m \u001b[0mFrozenBindings\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbindings\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    336\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    337\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0m__setitem__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkey\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/sparql.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, ctx, *args, **kwargs)\u001b[0m\n\u001b[1;32m    158\u001b[0m \u001b[0;32mclass\u001b[0m \u001b[0mFrozenBindings\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mFrozenDict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    159\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0m__init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mctx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 160\u001b[0;31m         \u001b[0mFrozenDict\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    161\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mctx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mctx\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    162\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/site-packages/rdflib/plugins/sparql/sparql.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m    103\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    104\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0m__init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 105\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_d\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    106\u001b[0m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_hash\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    107\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.8/_collections_abc.py\u001b[0m in \u001b[0;36m__iter__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    741\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    742\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0m__iter__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 743\u001b[0;31m         \u001b[0;32mfor\u001b[0m \u001b[0mkey\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mapping\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    744\u001b[0m             \u001b[0;32myield\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_mapping\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    745\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
+     ]
+    }
+   ],
+   "source": [
+    "# Fonctions pour la mise à jour globale du corpus (ne pas exécuter en mode \"Tableau de bord\")\n",
+    "\n",
+    "def updateAllFactoids():\n",
+    "    dirList = glob('/opt/dashboards/store/CCTP-SRSA-IP-20210831/*')\n",
+    "    for directory in dirList:\n",
+    "        if directory.split('/')[-1] != '0_NONE':\n",
+    "            print(directory)\n",
+    "            reqId = directory.split('/')[-1]\n",
+    "            createOnto(reqId)\n",
+    "\n",
+    "#updateAllFactoids()\n",
+    "        \n",
+    "        \n",
+    "def updateAllVerificationMessages():\n",
+    "    dirList = glob('/opt/dashboards/store/CCTP-SRSA-IP-20210831/*')\n",
+    "    for directory in dirList:\n",
+    "        if directory.split('/')[-1] != '0_NONE':\n",
+    "            print(directory)\n",
+    "            reqId = directory.split('/')[-1]\n",
+    "            for messageType in ['error', 'warning', 'info']:\n",
+    "                #print(messageType)\n",
+    "                messagesStr = ''\n",
+    "                for queryTMP in queryTypeDic[messageType] :\n",
+    "                    query = queryTMP.replace(\"##ID##\",reqId)\n",
+    "                    for r in g.query(query):\n",
+    "                        message = r['message']\n",
+    "                        #print(message)\n",
+    "                        if message not in messagesStr :\n",
+    "                            #print(\"printing\")\n",
+    "                            messagesStr+=message+\"\\n\"\n",
+    "                outFile = open(directory+'/current/'+reqId+'_'+messageType+'Messages.txt', 'w')\n",
+    "                outFile.write(messagesStr)\n",
+    "                outFile.close()    \n",
+    "\n",
+    "#updateAllVerificationMessages()\n",
+    "\n",
+    "            \n",
+    "#query = possibleUnderspecificationQuery.replace(\"##ID##\",\"SRSA-IP_STB_PHON_00500\")\n",
+    "\n",
+    "#for r in g.query(query):\n",
+    "    #print(r['message'])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ca43f1f2-42ef-4355-a2e2-e27351a51b96",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#######################################################################################################\n",
+    "# Navigateur / éditeur de corpus UNL\n",
+    "#######################################################################################################\n",
+    "\n",
+    "saveButtonClicks = 0\n",
+    "\n",
+    "def main_pane(directory):    \n",
+    "    saveButtonClicks = 0\n",
+    "    saveButton = pn.widgets.Button(name='Enregistrer', button_type='success', width = 100)\n",
+    "    saveButtonDic = dict(button=saveButton)\n",
+    "    saveCommentButton = pn.widgets.Button(name='Enregistrer', button_type='success', width = 100)\n",
+    "   \n",
+    "    path = storeBase+directory+'/current/'\n",
+    "    pathOrig = storeBase+directory+'/orig/'\n",
+    "    svgPath = path+directory+'.svg'\n",
+    "    pngPath = path+directory+'.png'\n",
+    "    unlPath = path+directory+'.unl'\n",
+    "    rdfPath = path+directory+'.ttl'\n",
+    "    commentPath = path+directory+'.comments'\n",
+    "    with open(commentPath) as commentFile:\n",
+    "        commentStr = commentFile.read() \n",
+    "        commentFile.close()\n",
+    "    with open(unlPath) as unlFile:\n",
+    "        unlStr = unlFile.read()\n",
+    "        unlFile.close()\n",
+    "    svgPathOrig = pathOrig+directory+'.svg'\n",
+    "    pngPathOrig = pathOrig+directory+'.png'\n",
+    "    unlPathOrig = pathOrig+directory+'.unl'\n",
+    "    rdfPathOrig = pathOrig+directory+'.ttl'\n",
+    "    with open(unlPathOrig) as unlFileOrig:\n",
+    "        unlStrOrig = unlFileOrig.read()\n",
+    "        unlFileOrig.close()\n",
+    "    unlHtmlOrig = unlStrOrig.replace(\"\\n\",\"<br/>\")\n",
+    "    if unlStrOrig == unlStr:\n",
+    "        modIndicator = ''\n",
+    "    else:\n",
+    "        modIndicator = ' <u>modifié</u>'\n",
+    "    regexFr = re.compile(\"{org:fr}\\n(.*?)\\n{/org}\",re.MULTILINE|re.DOTALL)\n",
+    "    try:\n",
+    "        frStr = regexFr.search(unlStr).group(1)\n",
+    "    except AttributeError:\n",
+    "        frStr = ''\n",
+    "    regexEn = re.compile(\"{en}\\n(.*?)\\n{/en}\",re.MULTILINE|re.DOTALL)\n",
+    "    try:\n",
+    "        enStr = regexEn.search(unlStr).group(1)  \n",
+    "    except AttributeError:\n",
+    "        enStr = ''\n",
+    "        \n",
+    "    unlOrig_html = pn.pane.HTML(unlHtmlOrig)\n",
+    "    unl_input = pn.widgets.input.TextAreaInput(height=400)\n",
+    "    unl_input.value = unlStr\n",
+    "    comment_input = pn.widgets.input.TextAreaInput(height=300)\n",
+    "    comment_input.value = commentStr\n",
+    "    \n",
+    "    downloadSvg = pn.widgets.FileDownload(sizing_mode='stretch_width', file=svgPath, embed=True, name='Télécharger le graphe en SVG :')\n",
+    "    downloadPng = pn.widgets.FileDownload(sizing_mode='stretch_width', file=pngPath, embed=True, name='Télécharger le graphe en PNG :')    \n",
+    "    downloadRdf = pn.widgets.FileDownload(sizing_mode='stretch_width', file=rdfPath, embed=True, name='Télécharger le code UNL-RDF :')\n",
+    "    downloadUnl = pn.widgets.FileDownload(sizing_mode='stretch_width', file=unlPath, embed=True, name='Télécharger le code UNL :')\n",
+    "    \n",
+    "    def compute_unl_graph_pane(button):\n",
+    "        global saveButtonClicks\n",
+    "        if saveButtonClicks != 0:\n",
+    "            writeUnlFiles(unl_input.value, storeBase+selectDir.value+'/current/'+selectDir.value)\n",
+    "        pane = pn.pane.PNG(pngPath, width = pane_width)\n",
+    "        saveButtonClicks += 1\n",
+    "        return(pane)\n",
+    "    \n",
+    "    unl_graph_pane = pn.interact(lambda button : compute_unl_graph_pane(button), **saveButtonDic)\n",
+    "    \n",
+    "    \n",
+    "    \n",
+    "    warnColumn = pn.Card(width=pane_width, header = \"**Alertes automatiques pour l'exigence :**\")\n",
+    "    warnings = 0\n",
+    "    try:    \n",
+    "        errorfile = open(storeBase+directory+'/current/'+directory+'_errorMessages.txt','r')\n",
+    "        errorlist = errorfile.readlines()\n",
+    "        errorfile.close()\n",
+    "        for error in errorlist: \n",
+    "            warnColumn.append(pn.pane.HTML('<div class=\"alert alert-danger\">{}</div>'.format(error)))\n",
+    "        if errorlist != [] :  \n",
+    "            warnings = 1\n",
+    "    except :\n",
+    "        pass\n",
+    "    try:    \n",
+    "        warnfile = open(storeBase+directory+'/current/'+directory+'_warningMessages.txt','r')\n",
+    "        warnlist = warnfile.readlines()\n",
+    "        warnfile.close()\n",
+    "        for warn in warnlist: \n",
+    "            warnColumn.append(pn.pane.HTML('<div class=\"alert alert-warning\">{}</div>'.format(warn)))\n",
+    "        if warnlist != [] :  \n",
+    "            warnings = 1\n",
+    "    except :\n",
+    "        pass \n",
+    "    try:    \n",
+    "        infofile = open(storeBase+directory+'/current/'+directory+'_infoMessages.txt','r')\n",
+    "        infolist = infofile.readlines()\n",
+    "        infofile.close()\n",
+    "        for info in infolist: \n",
+    "            warnColumn.append(pn.pane.HTML('<div class=\"alert alert-info\">{}</div>'.format(info)))\n",
+    "        if infolist != [] :  \n",
+    "            warnings = 1\n",
+    "    except :\n",
+    "        pass\n",
+    "    if warnings == 0:\n",
+    "            warnColumn.append(pn.pane.HTML('<div class=\"alert alert-info\">Pas d\\'anomalie détectée</div>'))\n",
+    "                             \n",
+    "    pane = pn.Column(\n",
+    "            pn.Row(\n",
+    "                pn.pane.HTML('<a href=\"https://unsel.tetras-lab.io/webvowl#{}_factoid\" target=\"_blank\"><button type=\"button\" class=\"btn btn-outline-secondary btn-sm\">Visualiser l\\'ontologie construite</button><a>'.format(directory)),\n",
+    "                pn.Column(pn.pane.HTML('<font size=\"tiny\">Exigence sélectionnée : '+directory+'</font>'), sizing_mode='stretch_width'),  \n",
+    "                ),\n",
+    "            #pn.Card(\n",
+    "            #    pn.pane.HTML('''<iframe id=\"inlineFrameExample\"\n",
+    "            #        title=\"Inline Frame Example\"\n",
+    "            #        width=\"{}\"\n",
+    "            #        height=\"800\"\n",
+    "            #        src=\"https://unsel.tetras-lab.io/webvowl/#{}_factoid\">\n",
+    "            #        </iframe>'''.format(pane_width,selectDir.value)),\n",
+    "            #        title = \"Visualiser le factoid\", width=pane_width, collapsed=True),\n",
+    "            warnColumn,\n",
+    "            pn.pane.HTML('FR : '+frStr),\n",
+    "            pn.pane.HTML('EN : '+enStr),\n",
+    "            unl_graph_pane[1],\n",
+    "            pn.Card(pn.Column(saveCommentButton, comment_input, width = pane_width),\n",
+    "                    header='**Commentaires**', \n",
+    "                    collapsed=True, width = pane_width),     \n",
+    "            pn.Card(pn.Column(saveButton, unl_input, width = pane_width),\n",
+    "                    header='**Code UNL**'+modIndicator, \n",
+    "                    collapsed=True, width = pane_width),\n",
+    "            pn.Card(pn.Column(\n",
+    "                        unlOrig_html,\n",
+    "                        pn.Card(pn.pane.PNG(pngPath, width = pane_width-20), header = \"**Graphe d'origine**\", width=pane_width-10)\n",
+    "                    ), \n",
+    "                    header=\"**Code UNL d'origine**\", \n",
+    "                    collapsed=True, width = pane_width),\n",
+    "            pn.WidgetBox(\n",
+    "                pn.Row(downloadPng, downloadSvg),\n",
+    "                pn.Row(downloadUnl, downloadRdf),\n",
+    "                width=pane_width,\n",
+    "            ),\n",
+    "            width=pane_width, \n",
+    "        )\n",
+    "    \n",
+    "    def saveComments(event):\n",
+    "        with open(commentPath, 'w') as commentFile:\n",
+    "            commentFile.write(comment_input.value) \n",
+    "            commentFile.close()\n",
+    "    saveCommentButton.on_click(saveComments)  \n",
+    "    return(pane)\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5d4ec56e-d0bb-44c8-975b-49d409b6b160",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#######################################################################################################\n",
+    "# Analyse avec UNseL-inter\n",
+    "#######################################################################################################\n",
+    "def extractOnClick(event):\n",
+    "    uuidStr = \"COMP\"+str(uuid.uuid4())\n",
+    "    createFolderFromUnselInter(uuidStr)\n",
+    "    selectDir.options[uuidStr] = uuidStr\n",
+    "    selectDir.value = uuidStr\n",
+    "    createOnto(uuidStr)\n",
+    "\n",
+    "buttonExtract = pn.widgets.Button(name=\"Construire l'ontologie de la phrase (factoïd)\", width=300)\n",
+    "buttonExtract.param.watch(extractOnClick, 'clicks')\n",
+    "\n",
+    "#######################################################################################################\n",
+    "# Navigation Interface\n",
+    "#######################################################################################################\n",
+    "\n",
+    "pathList = glob(storeBase+'*')\n",
+    "dirList = sorted([x.split('/')[-1] for x in pathList])\n",
+    "warningList = sorted(list(errorReqDic.keys())+list(warningReqDic.keys())+list(infoReqDic.keys()))\n",
+    "\n",
+    "dirDic = {}\n",
+    "for directory in dirList:\n",
+    "    if directory in warningList:\n",
+    "        dirDic['**'+directory] = directory\n",
+    "    else:\n",
+    "        dirDic[directory] = directory\n",
+    "\n",
+    "dirDic = dict(sorted(dirDic.items()))\n",
+    "        \n",
+    "selectDir = pn.widgets.Select(name='Sélectionnez une exigence : ', options=dirDic, width = 350)\n",
+    "selectDir.value = '0_NONE'\n",
+    "dir_selector = dict(directory=selectDir)#, save=saveButton)\n",
+    "\n",
+    "unlNavigatorPane = pn.interact(lambda directory : main_pane(directory), **dir_selector)\n",
+    "\n",
+    "#unl_graph_pane = pn.interact(lambda button : compute_unl_graph_pane(button), **saveButtonDic)\n",
+    "\n",
+    "# Main interface\n",
+    "pn.Column(\n",
+    "pn.Card( pn.Row(\n",
+    "    pn.Spacer(sizing_mode='stretch_width'),\n",
+    "    pn.Column(\n",
+    "    pn.pane.HTML('''<iframe id=\"inlineFrameExample\"\n",
+    "        title=\"Inline Frame Example\"\n",
+    "        width=\"1000\"\n",
+    "        height=\"600\"\n",
+    "        src=\"https://lingwarium.org/UNseL-inter/\">\n",
+    "    </iframe>'''),\n",
+    "    buttonExtract),\n",
+    "    pn.Spacer(sizing_mode='stretch_width'),\n",
+    "    ),    \n",
+    "    title = \"Analyser une nouvelle exigence\", width=pane_width+50, collapsed=True,),\n",
+    "    pn.layout.Divider(),\n",
+    "    pn.Card( \n",
+    "        pn.Row(\n",
+    "            pn.layout.HSpacer(),\n",
+    "            pn.Column(\n",
+    "                selectDir,\n",
+    "                unlNavigatorPane[1],\n",
+    "            ),\n",
+    "            pn.layout.HSpacer(),\n",
+    "        ),\n",
+    "        title = \"Naviguer dans les exigences\", width=pane_width+50, collapsed=True\n",
+    "    ),\n",
+    "    pn.layout.Divider(), \n",
+    "    pn.Card(\n",
+    "        pn.pane.HTML('''<iframe id=\"inlineFrameExample\"\n",
+    "        title=\"Inline Frame Example\"\n",
+    "        width=\"{}\"\n",
+    "        height=\"800\"\n",
+    "        src=\"https://unsel.tetras-lab.io/webvowl/#extraction\">\n",
+    "    </iframe>'''.format(pane_width)),\n",
+    "        pn.Row(\n",
+    "            #pn.pane.HTML('<a href=\"https://unsel.tetras-lab.io/webvowl#extraction\" target=\"_blank\"><button type=\"button\" class=\"btn btn-outline-secondary btn-sm\">Visualiser l\\'ontologie construite pour tout le corpus</button><a>'),\n",
+    "            #pn.pane.HTML('<a href=\"https://unsel.tetras-lab.io/static/webvowl_1.1.7/index.html#extraction_SRSA-IP_STB_PHON_00100\" target=\"_blank\"><button type=\"button\" class=\"btn btn-outline-secondary btn-sm\">Visualiser l\\'extraction de SRSA-IP_STB_PHON_00100</button><a>'),            \n",
+    "            pn.widgets.FileDownload(file='/opt/dashboards/static/extraction.ttl', embed=True)\n",
+    "         ), title = \"Résultats sur le corpus\", width=pane_width+50, collapsed=True\n",
+    "    ), \n",
+    ")\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e06f5381-6c2d-4762-bcb9-a914fb5889e3",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#list = glob('/opt/dashboards/store/CCTP-SRSA-IP-20210831/*')\n",
+    "#for d in list : \n",
+    "#    print(d)\n",
+    "#    uuidStr = d.replace('/opt/dashboards/store/CCTP-SRSA-IP-20210831/','')\n",
+    "#    createOnto(uuidStr)"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.8.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}