Update hugging_face.py (#5488)
commit 1f88cd165f
parent 56cea8cbbd
@@ -334,16 +334,16 @@ class HFLocalInvocationLayer(PromptModelInvocationLayer):
         self, model: Union[str, "PreTrainedModel"], hub_kwargs: Dict, model_kwargs: Optional[Dict] = None
     ) -> Union["PreTrainedTokenizer", "PreTrainedTokenizerFast", None]:
         """
-        this method prepares the tokenizer before passing it to transformers' pipeline, so that the instantiated pipeline
+        This method prepares the tokenizer before passing it to transformers' pipeline, so that the instantiated pipeline
         object has a working tokenizer.
 
-        It basically check whether the pipeline method in the transformers library will load the tokenizer.
-        - If yes, None will be returned, because in this case, the pipeline is intelligent enough to load the tokenizer by itself
-        - If not, we will load the tokenizer and an tokenizer instance is returned
+        It checks whether the pipeline method in the transformers library will load the tokenizer.
+        - If yes, None will be returned, because in this case, the pipeline is intelligent enough to load the tokenizer by itself.
+        - If not, we will load the tokenizer and an tokenizer instance is returned.
 
-        :param model: the name or path of the underlying model
-        :hub_kwargs: keyword argument related to hugging face hub, including revision, trust_remote_code and use_auth_token
-        :model_kwargs: keyword arguments passed to the underlying model
+        :param model: The name or path of the underlying model.
+        :hub_kwargs: Keyword argument related to hugging face hub, including revision, trust_remote_code and use_auth_token.
+        :model_kwargs: Keyword arguments passed to the underlying model.
         """
 
         if isinstance(model, str):
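For context, here is a minimal, self-contained sketch of the behaviour the docstring describes: return None when transformers' pipeline() can resolve the tokenizer by itself, otherwise load and return a tokenizer instance. This is not the Haystack method itself (the real implementation performs further checks on the string case); the name prepare_tokenizer and its parameters are illustrative assumptions.

from typing import Dict, Optional, Union

from transformers import AutoTokenizer, PreTrainedModel, PreTrainedTokenizer, PreTrainedTokenizerFast


def prepare_tokenizer(
    model: Union[str, PreTrainedModel],
    hub_kwargs: Optional[Dict] = None,
) -> Union[PreTrainedTokenizer, PreTrainedTokenizerFast, None]:
    # When the model is given as a Hub name or local path, transformers'
    # pipeline() resolves the matching tokenizer on its own, so returning
    # None lets it do that.
    if isinstance(model, str):
        return None
    # A concrete PreTrainedModel instance carries no tokenizer, so load one
    # explicitly from the name/path recorded in the model's config, forwarding
    # Hub options such as revision, trust_remote_code, or use_auth_token.
    hub_kwargs = hub_kwargs or {}
    return AutoTokenizer.from_pretrained(model.name_or_path, **hub_kwargs)

Either return value can then be passed as the tokenizer argument of transformers.pipeline(), e.g. pipeline("text-generation", model=model, tokenizer=prepare_tokenizer(model)), since pipeline() falls back to loading the tokenizer itself when it receives None together with a string model name.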