mirror of
https://github.com/deepset-ai/haystack.git
synced 2025-09-25 08:04:49 +00:00
When using a local model in a pipeline YAML, the PromptModel cannot select the HFLocalInvocationLayer, because get_task does not support offline models. *Local model usage: add the task_name parameter in model_kwargs for local models — for example text-generation or text2text-generation. - name: PModel type: PromptModel params: model_name_or_path: /local_model_path model_kwargs: task_name: text-generation - name: Prompter params: model_name_or_path: PModel default_prompt_template: question-answering type: PromptNode Signed-off-by: yuanwu <yuan.wu@intel.com>
This commit is contained in:
parent
479092e3c1
commit
c88bc19791
@ -1,5 +1,6 @@
|
||||
from typing import Optional, Union, List, Dict
|
||||
import logging
|
||||
import os
|
||||
|
||||
import torch
|
||||
|
||||
@ -266,6 +267,9 @@ class HFLocalInvocationLayer(PromptModelInvocationLayer):
|
||||
@classmethod
|
||||
def supports(cls, model_name_or_path: str, **kwargs) -> bool:
|
||||
task_name: Optional[str] = None
|
||||
if os.path.exists(model_name_or_path):
|
||||
return True
|
||||
|
||||
try:
|
||||
task_name = get_task(model_name_or_path, use_auth_token=kwargs.get("use_auth_token", None))
|
||||
except RuntimeError:
|
||||
|
Loading…
x
Reference in New Issue
Block a user