Mirror of https://github.com/deepset-ai/haystack.git, synced 2025-12-17 01:58:23 +00:00
Docs: Update OpenAIGen docstrings and add missing headers (#8105)
* update docstrings

* Update haystack/components/generators/openai.py

Co-authored-by: Daria Fokina <daria.fokina@deepset.ai>

---------

Co-authored-by: Daria Fokina <daria.fokina@deepset.ai>
parent 92e2377eff
commit e8598befb6
@@ -18,25 +18,21 @@ logger = logging.getLogger(__name__)
 @component
 class OpenAIGenerator:
     """
-    Text generation component using OpenAI's large language models (LLMs).
+    Generates text using OpenAI's large language models (LLMs).
 
-    Enables text generation using OpenAI's large language models (LLMs). It supports gpt-4 and gpt-3.5-turbo
-    family of models.
+    It works with the gpt-4 and gpt-3.5-turbo models and supports streaming responses
+    from OpenAI API. It uses strings as input and output.
 
-    Users can pass any text generation parameters valid for the `openai.ChatCompletion.create` method
-    directly to this component via the `**generation_kwargs` parameter in __init__ or the `**generation_kwargs`
-    parameter in `run` method.
+    You can customize how the text is generated by passing parameters to the
+    OpenAI API. Use the `**generation_kwargs` argument when you initialize
+    the component or when you run it. Any parameter that works with
+    `openai.ChatCompletion.create` will work here too.
 
-    For more details on the parameters supported by the OpenAI API, refer to the OpenAI
-    [documentation](https://platform.openai.com/docs/api-reference/chat).
 
-    Key Features and Compatibility:
-    - Primary Compatibility: Designed to work seamlessly with gpt-4, gpt-3.5-turbo family of models.
-    - Streaming Support: Supports streaming responses from the OpenAI API.
-    - Customizability: Supports all parameters supported by the OpenAI API.
+    For details on OpenAI API parameters, see
+    [OpenAI documentation](https://platform.openai.com/docs/api-reference/chat).
 
-    Input and Output Format:
-    - String Format: This component uses the strings for both input and output.
+    ### Usage example
 
     ```python
     from haystack.components.generators import OpenAIGenerator
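For context, a minimal sketch of the usage the updated docstring describes: `generation_kwargs` passed at init (or at run time) and a `streaming_callback` that receives StreamingChunk objects. The model name, the sampling values, and reading the API key from the `OPENAI_API_KEY` environment variable are illustrative assumptions, not part of this diff.

```python
from haystack.components.generators import OpenAIGenerator

# Assumption: the API key is picked up from the OPENAI_API_KEY environment variable.
client = OpenAIGenerator(
    model="gpt-3.5-turbo",                                          # assumed model name
    generation_kwargs={"temperature": 0.2, "max_tokens": 256},      # any openai.ChatCompletion.create parameter
    streaming_callback=lambda chunk: print(chunk.content, end=""),  # called once per StreamingChunk
)

response = client.run("Explain retrieval-augmented generation in one sentence.")
print(response["replies"][0])  # generated text: string in, string out
```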
@@ -65,12 +61,12 @@ class OpenAIGenerator:
         max_retries: Optional[int] = None,
     ):
         """
-        Creates an instance of OpenAIGenerator. Unless specified otherwise in the `model`, OpenAI's GPT-3.5 is used.
+        Creates an instance of OpenAIGenerator. Unless specified otherwise in `model`, uses OpenAI's GPT-3.5.
 
         By setting the 'OPENAI_TIMEOUT' and 'OPENAI_MAX_RETRIES' you can change the timeout and max_retries parameters
         in the OpenAI client.
 
-        :param api_key: The OpenAI API key.
+        :param api_key: The OpenAI API key to connect to OpenAI.
         :param model: The name of the model to use.
         :param streaming_callback: A callback function that is called when a new token is received from the stream.
             The callback function accepts StreamingChunk as an argument.
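A small sketch of the timeout and retry behaviour the docstring mentions: the environment variables are read when the client is created, so they must be set before initialization. The concrete values below are placeholders.

```python
import os

from haystack.components.generators import OpenAIGenerator

# Per the docstring, OPENAI_TIMEOUT and OPENAI_MAX_RETRIES adjust the underlying
# OpenAI client when timeout/max_retries are not passed explicitly. Placeholder values.
os.environ["OPENAI_TIMEOUT"] = "30"     # seconds
os.environ["OPENAI_MAX_RETRIES"] = "3"

generator = OpenAIGenerator()  # defaults to OpenAI's GPT-3.5 unless `model` says otherwise
```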
@@ -20,7 +20,7 @@ class InMemoryEmbeddingRetriever:
     In indexing pipelines, use a DocumentEmbedder to embed documents.
     In query pipelines, use a TextEmbedder to embed queries and send them to the retriever.
 
-    Usage example:
+    ### Usage example
     ```python
     from haystack import Document
     from haystack.components.embedders import SentenceTransformersDocumentEmbedder, SentenceTransformersTextEmbedder
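To make the DocumentEmbedder/TextEmbedder split concrete, a hedged sketch of an indexing step followed by a query pipeline around InMemoryEmbeddingRetriever. The document store, the default embedding models, and any import paths not shown in the hunk are assumptions for illustration.

```python
from haystack import Document, Pipeline
from haystack.components.embedders import SentenceTransformersDocumentEmbedder, SentenceTransformersTextEmbedder
from haystack.components.retrievers.in_memory import InMemoryEmbeddingRetriever
from haystack.document_stores.in_memory import InMemoryDocumentStore  # assumed store for the example

document_store = InMemoryDocumentStore()

# Indexing: a DocumentEmbedder adds embeddings to documents before they are stored.
doc_embedder = SentenceTransformersDocumentEmbedder()
doc_embedder.warm_up()
docs = doc_embedder.run([Document(content="Haystack is an LLM orchestration framework.")])["documents"]
document_store.write_documents(docs)

# Querying: a TextEmbedder embeds the query and sends the embedding to the retriever.
query_pipeline = Pipeline()
query_pipeline.add_component("text_embedder", SentenceTransformersTextEmbedder())
query_pipeline.add_component("retriever", InMemoryEmbeddingRetriever(document_store=document_store))
query_pipeline.connect("text_embedder.embedding", "retriever.query_embedding")

result = query_pipeline.run({"text_embedder": {"text": "What is Haystack?"}})
print(result["retriever"]["documents"])  # documents ranked by embedding similarity
```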
@@ -16,7 +16,7 @@ class DocumentWriter:
     """
     Writes documents to a DocumentStore.
 
-    Usage example:
+    ### Usage example
     ```python
     from haystack import Document
     from haystack.components.writers import DocumentWriter
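A short, hedged usage sketch matching the docstring: the writer takes documents and persists them in the configured document store. InMemoryDocumentStore and the `documents_written` output key are assumptions for illustration, not shown in this hunk.

```python
from haystack import Document
from haystack.components.writers import DocumentWriter
from haystack.document_stores.in_memory import InMemoryDocumentStore  # assumed store for the example

document_store = InMemoryDocumentStore()
writer = DocumentWriter(document_store=document_store)

# run() writes the documents to the store and reports how many were written.
result = writer.run(documents=[Document(content="Haystack is an LLM orchestration framework.")])
print(result["documents_written"])  # assumed output key
```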