mirror of https://github.com/deepset-ai/haystack.git

Update num_processes config for FARMReader

parent: 07df974880
commit: f8da804a91
@@ -83,7 +83,7 @@ class FARMReader:
         self.top_k_per_candidate = top_k_per_candidate
         self.inferencer = Inferencer.load(model_name_or_path, batch_size=batch_size, gpu=use_gpu,
                                           task_type="question_answering", max_seq_len=max_seq_len,
-                                          doc_stride=doc_stride)
+                                          doc_stride=doc_stride, num_processes=num_processes)
         self.inferencer.model.prediction_heads[0].context_window_size = context_window_size
         self.inferencer.model.prediction_heads[0].no_ans_boost = no_ans_boost
         self.inferencer.model.prediction_heads[0].n_best = top_k_per_candidate + 1  # including possible no_answer
@@ -91,7 +91,6 @@ class FARMReader:
             self.inferencer.model.prediction_heads[0].n_best_per_sample = top_k_per_sample
         except:
             logger.warning("Could not set `top_k_per_sample` in FARM. Please update FARM version.")
-        self.num_processes = num_processes
         self.max_seq_len = max_seq_len
         self.use_gpu = use_gpu
 
@@ -227,7 +226,7 @@ class FARMReader:
 
         # get answers from QA model
         predictions = self.inferencer.inference_from_dicts(
-            dicts=input_dicts, rest_api_schema=True, num_processes=self.num_processes, multiprocessing_chunksize=1
+            dicts=input_dicts, rest_api_schema=True, multiprocessing_chunksize=1
        )
         # assemble answers from all the different documents & format them.
         # For the "no answer" option, we collect all no_ans_gaps and decide how likely
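
For context, a minimal usage sketch of what this change means for callers. The import path, model name, and the num_processes=0 semantics below are assumptions about the haystack/FARM versions of this era, not part of the diff: after this commit, num_processes is forwarded once to Inferencer.load() at construction time instead of being passed to inference_from_dicts() on every prediction call.

    # Sketch only: assumed import path and example model name, not taken from the diff.
    from haystack.reader.farm import FARMReader

    reader = FARMReader(
        model_name_or_path="deepset/roberta-base-squad2",  # example model, an assumption
        use_gpu=False,
        num_processes=0,  # assumption: 0 disables FARM's multiprocessing pool
    )
    # The multiprocessing configuration now lives in the reader's Inferencer,
    # so predict() calls no longer need to carry a num_processes argument.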