add stream_options to openai model (#5788)

stream_options are not part of the model classes, so they won't get
serialized when calling dump_component. Adding this to the model allows
us to store the stream options when the component is serialized.
---------

Signed-off-by: Peter Jausovec <peter.jausovec@solo.io>
Co-authored-by: Eric Zhu <ekzhu@users.noreply.github.com>
This commit is contained in:
Peter Jausovec 2025-03-03 13:58:05 -08:00 committed by GitHub
parent 679a9357f8
commit a785cd90f9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 7 additions and 0 deletions

View File

@@ -1112,6 +1112,7 @@ class OpenAIChatCompletionClient(BaseOpenAIChatCompletionClient, Component[OpenA
"this is content" becomes "Reviewer said: this is content."
This can be useful for models that do not support the `name` field in
message. Defaults to False.
stream_options (optional, dict): Additional options for streaming. Currently only `include_usage` is supported.
Examples:

View File

@@ -10,6 +10,10 @@ class ResponseFormat(TypedDict):
type: Literal["text", "json_object"]
class StreamOptions(TypedDict):
    """Options controlling streaming responses for the OpenAI chat completion API.

    Added so stream options are part of the model classes and therefore get
    serialized when calling ``dump_component`` (see commit message above).
    """

    # When True, the final streamed chunk includes token usage statistics.
    include_usage: bool
class CreateArguments(TypedDict, total=False):
frequency_penalty: Optional[float]
logit_bias: Optional[Dict[str, int]]
@@ -22,6 +26,7 @@ class CreateArguments(TypedDict, total=False):
temperature: Optional[float]
top_p: Optional[float]
user: str
stream_options: Optional[StreamOptions]
AsyncAzureADTokenProvider = Callable[[], Union[str, Awaitable[str]]]
@@ -67,6 +72,7 @@ class CreateArgumentsConfigModel(BaseModel):
temperature: float | None = None
top_p: float | None = None
user: str | None = None
stream_options: StreamOptions | None = None
class BaseOpenAIClientConfigurationConfigModel(CreateArgumentsConfigModel):