Mirror of https://github.com/microsoft/autogen.git
add stream_options to openai model (#5788)
stream_options are not part of the model classes, so they won't get serialized when calling dump_component. Adding this to the model allows us to store the stream options when the component is serialized.

Signed-off-by: Peter Jausovec <peter.jausovec@solo.io>
Co-authored-by: Eric Zhu <ekzhu@users.noreply.github.com>
This commit is contained in: parent 679a9357f8, commit a785cd90f9
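For context, a minimal sketch of what this change enables; the model name and option values are placeholders, not part of the commit, and an OPENAI_API_KEY is assumed to be set in the environment:

from autogen_ext.models.openai import OpenAIChatCompletionClient

# "gpt-4o" is an illustrative model name only.
client = OpenAIChatCompletionClient(
    model="gpt-4o",
    stream_options={"include_usage": True},
)

# Before this change, stream_options was dropped here because it was not a
# field on the config model; with this commit it round-trips through
# dump_component / load_component.
config = client.dump_component()
restored = OpenAIChatCompletionClient.load_component(config)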
@@ -1112,6 +1112,7 @@ class OpenAIChatCompletionClient(BaseOpenAIChatCompletionClient, Component[OpenA
             "this is content" becomes "Reviewer said: this is content."
             This can be useful for models that do not support the `name` field in
             message. Defaults to False.
+        stream_options (optional, dict): Additional options for streaming. Currently only `include_usage` is supported.

     Examples:
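The docstring hunk above documents the new parameter. A hedged sketch of streaming with usage reporting enabled (the prompt and model name are illustrative, not from the diff):

import asyncio

from autogen_core.models import UserMessage
from autogen_ext.models.openai import OpenAIChatCompletionClient


async def main() -> None:
    client = OpenAIChatCompletionClient(
        model="gpt-4o",
        stream_options={"include_usage": True},
    )
    # create_stream yields text chunks, then a final CreateResult.
    async for item in client.create_stream(
        messages=[UserMessage(content="Say hello.", source="user")]
    ):
        if isinstance(item, str):
            print(item, end="")
        else:
            print("\nusage:", item.usage)


asyncio.run(main())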
@@ -10,6 +10,10 @@ class ResponseFormat(TypedDict):
     type: Literal["text", "json_object"]


+class StreamOptions(TypedDict):
+    include_usage: bool
+
+
 class CreateArguments(TypedDict, total=False):
     frequency_penalty: Optional[float]
     logit_bias: Optional[Dict[str, int]]
@@ -22,6 +26,7 @@ class CreateArguments(TypedDict, total=False):
     temperature: Optional[float]
     top_p: Optional[float]
     user: str
+    stream_options: Optional[StreamOptions]


 AsyncAzureADTokenProvider = Callable[[], Union[str, Awaitable[str]]]
@@ -67,6 +72,7 @@ class CreateArgumentsConfigModel(BaseModel):
     temperature: float | None = None
     top_p: float | None = None
     user: str | None = None
+    stream_options: StreamOptions | None = None


 class BaseOpenAIClientConfigurationConfigModel(CreateArgumentsConfigModel):
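Because the new field also lands on the Pydantic config model, it can be validated from a plain dict. A small sketch, assuming the models shown in this diff are importable from autogen_ext.models.openai.config (path assumed, not shown in the diff):

from autogen_ext.models.openai.config import CreateArgumentsConfigModel  # import path assumed

cfg = CreateArgumentsConfigModel(stream_options={"include_usage": True})
print(cfg.model_dump(exclude_none=True))
# expected: {'stream_options': {'include_usage': True}}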