merge lazy import blocks (#6358)

Repository: https://github.com/deepset-ai/haystack.git (mirror, synced 2025-07-17 22:11:17 +00:00)
Parent commit: 2943b83b31
This commit:  1fff2bc255
@@ -9,12 +9,8 @@ logger = logging.getLogger(__name__)
 
 SUPPORTED_TASKS = ["text-generation", "text2text-generation"]
 
-with LazyImport(
-    message="PyTorch is needed to run this component. Please install it by following the instructions at https://pytorch.org/"
-) as torch_import:
+with LazyImport(message="Run 'pip install transformers[torch]'") as torch_and_transformers_import:
     import torch
-
-with LazyImport(message="Run 'pip install transformers'") as transformers_import:
     from huggingface_hub import model_info
     from transformers import (
         pipeline,
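For context, below is a minimal sketch of the lazy-import guard pattern that this hunk consolidates. The simplified LazyImport class is illustrative only and is not Haystack's actual haystack.lazy_imports.LazyImport implementation; it just shows how a guard can swallow an ImportError while the module is imported and re-raise it later from check(), and how the two guards above collapse into one.

# Illustrative sketch, not Haystack's real LazyImport implementation.
from types import TracebackType
from typing import Optional, Type


class LazyImport:
    """Collects an ImportError raised inside a `with` block and re-raises it lazily."""

    def __init__(self, message: str) -> None:
        self.message = message
        self.import_error: Optional[ImportError] = None

    def __enter__(self) -> "LazyImport":
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> bool:
        # Swallow ImportError so importing the module never fails eagerly.
        if isinstance(exc_value, ImportError):
            self.import_error = exc_value
            return True
        return False

    def check(self) -> None:
        # Raise only when the optional dependency is actually needed.
        if self.import_error is not None:
            raise ImportError(f"{self.message} (original error: {self.import_error})")


# After this change, a single guard covers torch and transformers together:
with LazyImport(message="Run 'pip install transformers[torch]'") as torch_and_transformers_import:
    import torch
    from huggingface_hub import model_info
    from transformers import pipeline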
@@ -127,8 +123,7 @@ class HuggingFaceLocalGenerator:
         For some chat models, the output includes both the new text and the original prompt.
         In these cases, it's important to make sure your prompt has no stop words.
         """
-        transformers_import.check()
-        torch_import.check()
+        torch_and_transformers_import.check()
 
         pipeline_kwargs = pipeline_kwargs or {}
         generation_kwargs = generation_kwargs or {}
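And a hedged sketch of how the merged guard is consumed: the component defers the dependency error from import time to construction time by calling check() in __init__. This assumes the torch_and_transformers_import guard from the sketch above is already in module scope; the parameter list and default model name here are illustrative assumptions, not the component's exact signature.

from typing import Any, Dict, Optional


class HuggingFaceLocalGenerator:
    def __init__(
        self,
        model: str = "google/flan-t5-base",  # illustrative default, not necessarily the real one
        pipeline_kwargs: Optional[Dict[str, Any]] = None,
        generation_kwargs: Optional[Dict[str, Any]] = None,
    ) -> None:
        # With the merged guard, a single check() raises the
        # "Run 'pip install transformers[torch]'" hint if either torch or
        # transformers is missing, replacing the two separate checks.
        torch_and_transformers_import.check()

        self.pipeline_kwargs = pipeline_kwargs or {}
        self.generation_kwargs = generation_kwargs or {}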