Skip to content
Snippets Groups Projects
Commit 76ced0b3 authored by David Rouquet's avatar David Rouquet
Browse files

multiproc stuff

parent 7646baa7
Branches
Tags
No related merge requests found
......@@ -13,7 +13,7 @@ from rdflib import Graph
import logging
import logging.config
import multiprocessing_logging
from multiprocessing import Pool
import multiprocessing
from extraction import config, structure, process
from utility.timer import timed
......@@ -166,6 +166,16 @@ def create_ontology_from_amrld_file(amrld_file_path,
return ontology_turtle_string
global result_triple_queue
def pool_function(sentence_indice, sentence_file_list):
    """Worker executed in a multiprocessing.Pool for one sentence file.

    Args:
        sentence_indice: index of the sentence file to process in
            sentence_file_list.
        sentence_file_list: list of all sentence file paths (the full list is
            passed so the pool's starmap arguments stay picklable).

    Returns:
        The list of new triples extracted for this sentence (also pushed onto
        the shared result queue).
    """
    sentence_file = sentence_file_list[sentence_indice]
    logger.info(f' *** sentence {sentence_indice} *** ')
    config.sentence_output_dir = f'-{sentence_indice}'
    new_triple_list = __apply_extraction(config, sentence_file)
    # Bug fix: the original extended `result_triple_list`, a local variable of
    # the caller that is not visible in a worker process (NameError). Per the
    # `global result_triple_queue` declaration, results go through the shared
    # queue instead.
    # NOTE(review): this relies on fork-style process start so the queue
    # created in the parent is inherited — verify on spawn platforms.
    result_triple_queue.put(new_triple_list)
    # Also return the triples so Pool.starmap results can be collected
    # directly, independently of the queue.
    return new_triple_list
@timed
def create_ontology_from_amrld_dir(amrld_dir_path,
......@@ -173,7 +183,7 @@ def create_ontology_from_amrld_dir(amrld_dir_path,
onto_prefix=None,
out_file_path=None,
technical_dir_path=None,
processes=1
processes=multiprocessing.cpu_count()-1
):
"""
Method to create an ontology (as Turtle String) from a transduction
......@@ -212,13 +222,17 @@ def create_ontology_from_amrld_dir(amrld_dir_path,
sentence_dir = config.source_sentence_file
sentence_count = 0
result_triple_list = []
    #@@ Support for multiprocessing must be done here
for sentence_file in glob.glob(sentence_dir, recursive = True):
sentence_count += 1
logger.info(f' *** sentence {sentence_count} *** ')
config.sentence_output_dir = f'-{sentence_count}'
new_triple_list = __apply_extraction(config, sentence_file)
result_triple_list.extend(new_triple_list)
result_triple_queue = multiprocessing.Queue()
sentence_file_list = glob.glob(sentence_dir, recursive = True)
    # The following is for multiprocessing logging (must be executed before the pool is created)
multiprocessing_logging.install_mp_handler()
star_iterable = [(i, sentence_file_list) for i in range(len(sentence_file_list))]
with multiprocessing.Pool(processes) as p:
p.starmap(pool_function, star_iterable)
# -- Final Ontology Generation (factoid_graph)
logger.info('\n === Final Ontology Generation === ')
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment