mirror of https://github.com/microsoft/autogen.git
synced 2025-12-05 19:40:01 +00:00
fix: Normalize openai client stop reason to make more robust (#5027)
* Normalize stop reason to make more robust
* format
* add unknown finish reason
parent a7c97dbbbf
commit c0082dd9cc
@@ -52,7 +52,7 @@ class RequestUsage:
     completion_tokens: int


-FinishReasons = Literal["stop", "length", "function_calls", "content_filter"]
+FinishReasons = Literal["stop", "length", "function_calls", "content_filter", "unknown"]


 @dataclass
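Widening the Literal lets a catch-all value be returned on any code path without a cast. A minimal sketch of why the extra member matters for type checking; the function fallback_reason is hypothetical and not code from this commit:

from typing import Literal

FinishReasons = Literal["stop", "length", "function_calls", "content_filter", "unknown"]

def fallback_reason(raw: str | None) -> FinishReasons:
    # "unknown" is now a legal member of the Literal, so a catch-all fallback
    # type-checks instead of needing a cast or `# type: ignore`.
    if raw == "stop":
        return "stop"
    return "unknown"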
@@ -30,6 +30,7 @@ from autogen_core import (
     Image,
     MessageHandlerContext,
 )
+from autogen_core.models import FinishReasons
 from autogen_core.logging import LLMCallEvent
 from autogen_core.models import (
     AssistantMessage,
@@ -327,6 +328,21 @@ def assert_valid_name(name: str) -> str:
     return name


+def normalize_stop_reason(stop_reason: str | None) -> FinishReasons:
+    if stop_reason is None:
+        return "unknown"
+
+    # Convert to lower case
+    stop_reason = stop_reason.lower()
+
+    KNOWN_STOP_MAPPINGS: Dict[str, FinishReasons] = {
+        "end_turn": "stop",
+        "tool_calls": "function_calls",
+    }
+
+    return KNOWN_STOP_MAPPINGS.get(stop_reason, "unknown")
+
+
 class BaseOpenAIChatCompletionClient(ChatCompletionClient):
     def __init__(
         self,
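For reference, a few illustrative checks against the helper above (not test code from the commit): provider-specific spellings are remapped, lookup is case-insensitive, and anything unrecognized, including a missing reason, degrades to "unknown" rather than raising.

assert normalize_stop_reason("end_turn") == "stop"
assert normalize_stop_reason("TOOL_CALLS") == "function_calls"   # lower-cased before lookup
assert normalize_stop_reason(None) == "unknown"                  # missing reason is no longer an error
assert normalize_stop_reason("brand_new_reason") == "unknown"    # unmapped values fall back safely

Note that in this revision only "end_turn" and "tool_calls" are remapped; any other spelling, including an already-canonical one, resolves to "unknown".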
@@ -747,8 +763,8 @@ class BaseOpenAIChatCompletionClient(ChatCompletionClient):
         else:
             prompt_tokens = 0

-        if stop_reason is None:
-            raise ValueError("No stop reason found")
+        if stop_reason == "function_call":
+            raise ValueError("Function calls are not supported in this context")

         content: Union[str, List[FunctionCall]]
         if len(content_deltas) > 1:
@@ -770,13 +786,9 @@ class BaseOpenAIChatCompletionClient(ChatCompletionClient):
             prompt_tokens=prompt_tokens,
             completion_tokens=completion_tokens,
         )
-        if stop_reason == "function_call":
-            raise ValueError("Function calls are not supported in this context")
-        if stop_reason == "tool_calls":
-            stop_reason = "function_calls"

         result = CreateResult(
-            finish_reason=stop_reason,  # type: ignore
+            finish_reason=normalize_stop_reason(stop_reason),
             content=content,
             usage=usage,
             cached=False,
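Taken together, the streaming path now raises early only for the unsupported legacy "function_call" reason and defers all other mapping, including a missing reason, to normalize_stop_reason instead of patching "tool_calls" inline. A simplified sketch of the resulting flow; the function name finish_reason_for is hypothetical and used only for illustration:

def finish_reason_for(stop_reason: str | None) -> FinishReasons:
    # Mirrors the new control flow: reject the unsupported reason up front,
    # then normalize whatever the provider sent into a valid FinishReasons member.
    if stop_reason == "function_call":
        raise ValueError("Function calls are not supported in this context")
    return normalize_stop_reason(stop_reason)

finish_reason_for("tool_calls")  # -> "function_calls"
finish_reason_for(None)          # -> "unknown" (previously raised "No stop reason found")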