Migrate model context and models modules out of components (#4613)

* Move model context out of components

* Move models out of components

* Rename docs file
Jack Gerrits 2024-12-09 13:00:08 -05:00 committed by GitHub
parent 3817b8ddf6
commit 87011ae01b
79 changed files with 1527 additions and 1359 deletions
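For downstream code, the migration is mechanical: drop the "components." segment from the module path. A minimal before/after sketch, using only imports that appear in the diffs below:

# Before this change (now deprecated):
#   from autogen_core.components.models import ChatCompletionClient, UserMessage
#   from autogen_core.components.model_context import BufferedChatCompletionContext

# After this change:
from autogen_core.models import ChatCompletionClient, UserMessage
from autogen_core.model_context import BufferedChatCompletionContext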

View File

@ -11,14 +11,14 @@ from typing import List
from autogen_core import AgentId, AgentProxy, TopicId
from autogen_core import SingleThreadedAgentRuntime
from autogen_core.logging import EVENT_LOGGER_NAME
from autogen_core.components.models import (
from autogen_core.models import (
ChatCompletionClient,
UserMessage,
LLMMessage,
)
from autogen_core import DefaultSubscription, DefaultTopicId
from autogen_ext.code_executors.local import LocalCommandLineCodeExecutor
from autogen_core.components.models import AssistantMessage
from autogen_core.models import AssistantMessage
from autogen_magentic_one.markdown_browser import MarkdownConverter, UnsupportedFormatException
from autogen_magentic_one.agents.coder import Coder, Executor

View File

@ -11,7 +11,7 @@ from typing import List
from autogen_core import AgentId, AgentProxy, TopicId
from autogen_core import SingleThreadedAgentRuntime
from autogen_core import EVENT_LOGGER_NAME
from autogen_core.components.models import (
from autogen_core.models import (
ChatCompletionClient,
ModelCapabilities,
UserMessage,
@ -19,7 +19,7 @@ from autogen_core.components.models import (
)
from autogen_core import DefaultSubscription, DefaultTopicId
from autogen_ext.code_executors.local import LocalCommandLineCodeExecutor
from autogen_core.components.models import AssistantMessage
from autogen_core.models import AssistantMessage
from autogen_magentic_one.markdown_browser import MarkdownConverter, UnsupportedFormatException
from autogen_magentic_one.agents.coder import Coder, Executor

View File

@ -6,7 +6,7 @@ from autogen_core import SingleThreadedAgentRuntime
from autogen_core import EVENT_LOGGER_NAME
from autogen_core import DefaultSubscription, DefaultTopicId
from autogen_ext.code_executors.local import LocalCommandLineCodeExecutor
from autogen_core.components.models import (
from autogen_core.models import (
UserMessage,
)

View File

@ -12,7 +12,7 @@ from autogen_core import SingleThreadedAgentRuntime
from autogen_core import EVENT_LOGGER_NAME
from autogen_core import DefaultSubscription, DefaultTopicId
from autogen_ext.code_executors.local import LocalCommandLineCodeExecutor
from autogen_core.components.models import (
from autogen_core.models import (
ChatCompletionClient,
UserMessage,
SystemMessage,

View File

@ -5,7 +5,8 @@ import warnings
from typing import Any, AsyncGenerator, Awaitable, Callable, Dict, List, Mapping, Sequence
from autogen_core import CancellationToken, FunctionCall
from autogen_core.components.models import (
from autogen_core.components.tools import FunctionTool, Tool
from autogen_core.models import (
AssistantMessage,
ChatCompletionClient,
FunctionExecutionResult,
@ -14,7 +15,6 @@ from autogen_core.components.models import (
SystemMessage,
UserMessage,
)
from autogen_core.components.tools import FunctionTool, Tool
from typing_extensions import deprecated
from .. import EVENT_LOGGER_NAME

View File

@ -1,6 +1,6 @@
import warnings
from autogen_core.components.models import (
from autogen_core.models import (
ChatCompletionClient,
)

View File

@ -1,8 +1,8 @@
from typing import AsyncGenerator, List, Sequence
from autogen_core import CancellationToken, Image
from autogen_core.components.models import ChatCompletionClient
from autogen_core.components.models._types import SystemMessage
from autogen_core.models import ChatCompletionClient
from autogen_core.models._types import SystemMessage
from autogen_agentchat.base import Response

View File

@ -2,10 +2,10 @@ import logging
import warnings
from typing import Any, Awaitable, Callable, List
from autogen_core.components.models import (
from autogen_core.components.tools import Tool
from autogen_core.models import (
ChatCompletionClient,
)
from autogen_core.components.tools import Tool
from .. import EVENT_LOGGER_NAME
from ._assistant_agent import AssistantAgent

View File

@ -1,7 +1,7 @@
from typing import List, Literal
from autogen_core import FunctionCall, Image
from autogen_core.components.models import FunctionExecutionResult, RequestUsage
from autogen_core.models import FunctionExecutionResult, RequestUsage
from pydantic import BaseModel, ConfigDict, Field
from typing_extensions import Annotated

View File

@ -1,6 +1,6 @@
from typing import Any, List, Mapping, Optional
from autogen_core.components.models import (
from autogen_core.models import (
LLMMessage,
)
from pydantic import BaseModel, Field

View File

@ -1,7 +1,7 @@
import logging
from typing import Callable, List
from autogen_core.components.models import ChatCompletionClient
from autogen_core.models import ChatCompletionClient
from .... import EVENT_LOGGER_NAME, TRACE_LOGGER_NAME
from ....base import ChatAgent, TerminationCondition

View File

@ -3,7 +3,7 @@ import logging
from typing import Any, Dict, List, Mapping
from autogen_core import AgentId, CancellationToken, DefaultTopicId, Image, MessageContext, event, rpc
from autogen_core.components.models import (
from autogen_core.models import (
AssistantMessage,
ChatCompletionClient,
LLMMessage,
@ -22,7 +22,6 @@ from ....messages import (
ToolCallMessage,
ToolCallResultMessage,
)
from ....state import MagenticOneOrchestratorState
from .._base_group_chat_manager import BaseGroupChatManager
from .._events import (

View File

@ -2,7 +2,7 @@ import logging
import re
from typing import Any, Callable, Dict, List, Mapping, Sequence
from autogen_core.components.models import ChatCompletionClient, SystemMessage
from autogen_core.models import ChatCompletionClient, SystemMessage
from ... import TRACE_LOGGER_NAME
from ...base import ChatAgent, TerminationCondition

View File

@ -4,7 +4,7 @@ import time
from typing import AsyncGenerator, List, Optional, TypeVar, cast
from autogen_core import Image
from autogen_core.components.models import RequestUsage
from autogen_core.models import RequestUsage
from autogen_agentchat.base import Response, TaskResult
from autogen_agentchat.messages import AgentMessage, MultiModalMessage

View File

@ -33,8 +33,8 @@ from autogen_agentchat.teams._group_chat._selector_group_chat import SelectorGro
from autogen_agentchat.teams._group_chat._swarm_group_chat import SwarmGroupChatManager
from autogen_agentchat.ui import Console
from autogen_core import AgentId, CancellationToken, FunctionCall
from autogen_core.components.models import FunctionExecutionResult
from autogen_core.components.tools import FunctionTool
from autogen_core.models import FunctionExecutionResult
from autogen_ext.code_executors.local import LocalCommandLineCodeExecutor
from autogen_ext.models import OpenAIChatCompletionClient, ReplayChatCompletionClient
from openai.resources.chat.completions import AsyncCompletions

View File

@ -13,7 +13,7 @@ from autogen_agentchat.conditions import (
TokenUsageTermination,
)
from autogen_agentchat.messages import HandoffMessage, StopMessage, TextMessage
from autogen_core.components.models import RequestUsage
from autogen_core.models import RequestUsage
@pytest.mark.asyncio

View File

@ -27,8 +27,8 @@ python/autogen_agentchat.state
python/autogen_core
python/autogen_core.code_executor
python/autogen_core.components.models
python/autogen_core.components.model_context
python/autogen_core.models
python/autogen_core.model_context
python/autogen_core.components.tools
python/autogen_core.components.tool_agent
python/autogen_core.exceptions

View File

@ -1,8 +1,8 @@
autogen\_core.components.model\_context
autogen\_core.model\_context
=======================================
.. automodule:: autogen_core.components.model_context
.. automodule:: autogen_core.model_context
:members:
:undoc-members:
:show-inheritance:

View File

@ -1,8 +1,8 @@
autogen\_core.components.models
autogen\_core.models
===============================
.. automodule:: autogen_core.components.models
.. automodule:: autogen_core.models
:members:
:undoc-members:
:show-inheritance:

View File

@ -73,7 +73,7 @@
}
],
"source": [
"from autogen_core.components.models import UserMessage\n",
"from autogen_core.models import UserMessage\n",
"\n",
"result = await opneai_model_client.create([UserMessage(content=\"What is the capital of France?\", source=\"user\")])\n",
"print(result)"

View File

@ -49,7 +49,7 @@
" message_handler,\n",
")\n",
"from autogen_core.components.model_context import BufferedChatCompletionContext\n",
"from autogen_core.components.models import (\n",
"from autogen_core.models import (\n",
" AssistantMessage,\n",
" ChatCompletionClient,\n",
" SystemMessage,\n",

View File

@ -65,7 +65,7 @@
"import os\n",
"from typing import Optional\n",
"\n",
"from autogen_core.components.models import UserMessage\n",
"from autogen_core.models import UserMessage\n",
"from autogen_ext.models import AzureOpenAIChatCompletionClient\n",
"\n",
"\n",

View File

@ -29,13 +29,13 @@
" message_handler,\n",
")\n",
"from autogen_core.base.intervention import DefaultInterventionHandler, DropMessage\n",
"from autogen_core.components.models import (\n",
"from autogen_core.components.tools import PythonCodeExecutionTool, ToolSchema\n",
"from autogen_core.models import (\n",
" ChatCompletionClient,\n",
" LLMMessage,\n",
" SystemMessage,\n",
" UserMessage,\n",
")\n",
"from autogen_core.components.tools import PythonCodeExecutionTool, ToolSchema\n",
"from autogen_core.tool_agent import ToolAgent, ToolException, tool_agent_caller_loop\n",
"from autogen_ext.code_executors.docker import DockerCommandLineCodeExecutor\n",
"from autogen_ext.models import OpenAIChatCompletionClient"

View File

@ -54,7 +54,7 @@
")\n",
"from autogen_core._default_subscription import DefaultSubscription\n",
"from autogen_core._default_topic import DefaultTopicId\n",
"from autogen_core.components.models import (\n",
"from autogen_core.models import (\n",
" SystemMessage,\n",
")"
]

View File

@ -65,7 +65,8 @@
" TypeSubscription,\n",
" message_handler,\n",
")\n",
"from autogen_core.components.models import (\n",
"from autogen_core.components.tools import FunctionTool, Tool\n",
"from autogen_core.models import (\n",
" AssistantMessage,\n",
" ChatCompletionClient,\n",
" FunctionExecutionResult,\n",
@ -74,7 +75,6 @@
" SystemMessage,\n",
" UserMessage,\n",
")\n",
"from autogen_core.components.tools import FunctionTool, Tool\n",
"from autogen_ext.models import OpenAIChatCompletionClient\n",
"from pydantic import BaseModel"
]
@ -120,7 +120,7 @@
"\n",
"We start with the `AIAgent` class, which is the class for all AI agents \n",
"(i.e., Triage, Sales, and Issue and Repair Agents) in the multi-agent chatbot.\n",
"An `AIAgent` uses a {py:class}`~autogen_core.components.models.ChatCompletionClient`\n",
"An `AIAgent` uses a {py:class}`~autogen_core.models.ChatCompletionClient`\n",
"to generate responses.\n",
"It can use regular tools directly or delegate tasks to other agents using `delegate_tools`.\n",
"It subscribes to topic type `agent_topic_type` to receive messages from the customer,\n",

View File

@ -39,7 +39,7 @@
"from typing import List\n",
"\n",
"from autogen_core import AgentId, MessageContext, RoutedAgent, SingleThreadedAgentRuntime, message_handler\n",
"from autogen_core.components.models import ChatCompletionClient, SystemMessage, UserMessage\n",
"from autogen_core.models import ChatCompletionClient, SystemMessage, UserMessage\n",
"from autogen_ext.models import OpenAIChatCompletionClient"
]
},

View File

@ -52,7 +52,7 @@
" default_subscription,\n",
" message_handler,\n",
")\n",
"from autogen_core.components.models import (\n",
"from autogen_core.models import (\n",
" AssistantMessage,\n",
" ChatCompletionClient,\n",
" LLMMessage,\n",

View File

@ -101,7 +101,7 @@
"from typing import Dict, List, Union\n",
"\n",
"from autogen_core import MessageContext, RoutedAgent, TopicId, default_subscription, message_handler\n",
"from autogen_core.components.models import (\n",
"from autogen_core.models import (\n",
" AssistantMessage,\n",
" ChatCompletionClient,\n",
" LLMMessage,\n",
@ -258,7 +258,7 @@
"- It stores message histories for different `CodeWritingTask` in a dictionary,\n",
"so each task has its own history.\n",
"- When making an LLM inference request using its model client, it transforms\n",
"the message history into a list of {py:class}`autogen_core.components.models.LLMMessage` objects\n",
"the message history into a list of {py:class}`autogen_core.models.LLMMessage` objects\n",
"to pass to the model client.\n",
"\n",
"The reviewer agent subscribes to the `CodeReviewTask` message and publishes the `CodeReviewResult` message."

View File

@ -57,7 +57,7 @@
" message_handler,\n",
" type_subscription,\n",
")\n",
"from autogen_core.components.models import ChatCompletionClient, SystemMessage, UserMessage\n",
"from autogen_core.models import ChatCompletionClient, SystemMessage, UserMessage\n",
"from autogen_ext.models import OpenAIChatCompletionClient"
]
},

View File

@ -6,9 +6,9 @@
"source": [
"# Model Clients\n",
"\n",
"AutoGen provides the {py:mod}`autogen_core.components.models` module with a suite of built-in\n",
"AutoGen provides the {py:mod}`autogen_core.models` module with a suite of built-in\n",
"model clients for using ChatCompletion API.\n",
"All model clients implement the {py:class}`~autogen_core.components.models.ChatCompletionClient` protocol class."
"All model clients implement the {py:class}`~autogen_core.models.ChatCompletionClient` protocol class."
]
},
{
@ -32,7 +32,7 @@
"metadata": {},
"outputs": [],
"source": [
"from autogen_core.components.models import UserMessage\n",
"from autogen_core.models import UserMessage\n",
"from autogen_ext.models import OpenAIChatCompletionClient\n",
"\n",
"# Create an OpenAI model client.\n",
@ -47,7 +47,7 @@
"metadata": {},
"source": [
"You can call the {py:meth}`~autogen_ext.models.OpenAIChatCompletionClient.create` method to create a\n",
"chat completion request, and await for an {py:class}`~autogen_core.components.models.CreateResult` object in return."
"chat completion request, and await for an {py:class}`~autogen_core.models.CreateResult` object in return."
]
},
{
@ -168,7 +168,7 @@
"source": [
"```{note}\n",
"The last response in the streaming response is always the final response\n",
"of the type {py:class}`~autogen_core.components.models.CreateResult`.\n",
"of the type {py:class}`~autogen_core.models.CreateResult`.\n",
"```\n",
"\n",
"**NB the default usage response is to return zero values**"
@ -333,7 +333,7 @@
"from dataclasses import dataclass\n",
"\n",
"from autogen_core import MessageContext, RoutedAgent, SingleThreadedAgentRuntime, message_handler\n",
"from autogen_core.components.models import ChatCompletionClient, SystemMessage, UserMessage\n",
"from autogen_core.models import ChatCompletionClient, SystemMessage, UserMessage\n",
"from autogen_ext.models import OpenAIChatCompletionClient\n",
"\n",
"\n",
@ -474,7 +474,7 @@
"outputs": [],
"source": [
"from autogen_core.components.model_context import BufferedChatCompletionContext\n",
"from autogen_core.components.models import AssistantMessage\n",
"from autogen_core.models import AssistantMessage\n",
"\n",
"\n",
"class SimpleAgentWithContext(RoutedAgent):\n",

View File

@ -163,13 +163,13 @@
" SingleThreadedAgentRuntime,\n",
" message_handler,\n",
")\n",
"from autogen_core.components.models import (\n",
"from autogen_core.components.tools import FunctionTool, Tool, ToolSchema\n",
"from autogen_core.models import (\n",
" ChatCompletionClient,\n",
" LLMMessage,\n",
" SystemMessage,\n",
" UserMessage,\n",
")\n",
"from autogen_core.components.tools import FunctionTool, Tool, ToolSchema\n",
"from autogen_core.tool_agent import ToolAgent, tool_agent_caller_loop\n",
"from autogen_ext.models import OpenAIChatCompletionClient\n",
"\n",
@ -267,7 +267,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"This example uses the {py:class}`autogen_core.components.models.OpenAIChatCompletionClient`,\n",
"This example uses the {py:class}`autogen_core.models.OpenAIChatCompletionClient`,\n",
"for Azure OpenAI and other clients, see [Model Clients](./model-clients.ipynb).\n",
"Let's test the agent with a question about stock price."
]

View File

@ -37,7 +37,7 @@
"\n",
"from autogen_core import DefaultTopicId, MessageContext, RoutedAgent, default_subscription, message_handler\n",
"from autogen_core.code_executor import CodeBlock, CodeExecutor\n",
"from autogen_core.components.models import (\n",
"from autogen_core.models import (\n",
" AssistantMessage,\n",
" ChatCompletionClient,\n",
" LLMMessage,\n",

View File

@ -15,9 +15,9 @@ from autogen_core import (
DefaultTopicId,
SingleThreadedAgentRuntime,
)
from autogen_core.components.model_context import BufferedChatCompletionContext
from autogen_core.components.models import SystemMessage
from autogen_core.components.tools import FunctionTool
from autogen_core.model_context import BufferedChatCompletionContext
from autogen_core.models import SystemMessage
from chess import BLACK, SQUARE_NAMES, WHITE, Board, Move
from chess import piece_name as get_piece_name
from common.agents._chat_completion_agent import ChatCompletionAgent

View File

@ -11,8 +11,9 @@ from autogen_core import (
RoutedAgent,
message_handler,
)
from autogen_core.components.model_context import ChatCompletionContext
from autogen_core.components.models import (
from autogen_core.components.tools import Tool
from autogen_core.model_context import ChatCompletionContext
from autogen_core.models import (
AssistantMessage,
ChatCompletionClient,
FunctionExecutionResult,
@ -20,7 +21,6 @@ from autogen_core.components.models import (
SystemMessage,
UserMessage,
)
from autogen_core.components.tools import Tool
from ..types import (
FunctionCallMessage,

View File

@ -2,8 +2,8 @@ import logging
from typing import Any, Callable, List, Mapping
from autogen_core import AgentId, AgentProxy, MessageContext, RoutedAgent, message_handler
from autogen_core.components.model_context import ChatCompletionContext
from autogen_core.components.models import ChatCompletionClient, UserMessage
from autogen_core.model_context import ChatCompletionContext
from autogen_core.models import ChatCompletionClient, UserMessage
from ..types import (
MultiModalMessage,

View File

@ -4,8 +4,8 @@ import re
from typing import Dict, List
from autogen_core import AgentProxy
from autogen_core.components.model_context import ChatCompletionContext
from autogen_core.components.models import ChatCompletionClient, SystemMessage, UserMessage
from autogen_core.model_context import ChatCompletionContext
from autogen_core.models import ChatCompletionClient, SystemMessage, UserMessage
async def select_speaker(context: ChatCompletionContext, client: ChatCompletionClient, agents: List[AgentProxy]) -> int:

View File

@ -5,7 +5,7 @@ from enum import Enum
from typing import List, Union
from autogen_core import FunctionCall, Image
from autogen_core.components.models import FunctionExecutionResultMessage
from autogen_core.models import FunctionExecutionResultMessage
@dataclass(kw_only=True)

View File

@ -1,7 +1,7 @@
import os
from typing import Any, List, Optional, Union
from autogen_core.components.models import (
from autogen_core.models import (
AssistantMessage,
ChatCompletionClient,
FunctionExecutionResult,

View File

@ -5,7 +5,7 @@ from uuid import uuid4
from _types import GroupChatMessage, MessageChunk, RequestToSpeak, UIAgentConfig
from autogen_core import DefaultTopicId, MessageContext, RoutedAgent, message_handler
from autogen_core.components.models import (
from autogen_core.models import (
AssistantMessage,
ChatCompletionClient,
LLMMessage,

View File

@ -1,7 +1,7 @@
from dataclasses import dataclass
from typing import Dict
from autogen_core.components.models import (
from autogen_core.models import (
LLMMessage,
)
from autogen_ext.models import AzureOpenAIClientConfiguration

View File

@ -42,14 +42,14 @@ from autogen_core import (
type_subscription,
)
from autogen_core.base.intervention import DefaultInterventionHandler
from autogen_core.components.model_context import BufferedChatCompletionContext
from autogen_core.components.models import (
from autogen_core.components.tools import BaseTool
from autogen_core.model_context import BufferedChatCompletionContext
from autogen_core.models import (
AssistantMessage,
ChatCompletionClient,
SystemMessage,
UserMessage,
)
from autogen_core.components.tools import BaseTool
from common.types import TextMessage
from common.utils import get_chat_completion_client_from_envs
from pydantic import BaseModel, Field

View File

@ -1,9 +1,32 @@
from ._buffered_chat_completion_context import BufferedChatCompletionContext
from ._chat_completion_context import ChatCompletionContext
from ._head_and_tail_chat_completion_context import HeadAndTailChatCompletionContext
from typing_extensions import deprecated
from ...model_context import BufferedChatCompletionContext as BufferedChatCompletionContextAlias
from ...model_context import ChatCompletionContext as ChatCompletionContextAlias
from ...model_context import HeadAndTailChatCompletionContext as HeadAndTailChatCompletionContextAlias
__all__ = [
"ChatCompletionContext",
"BufferedChatCompletionContext",
"HeadAndTailChatCompletionContext",
]
@deprecated(
"autogen_core.components.model_context.BufferedChatCompletionContextAlias moved to autogen_core.model_context.BufferedChatCompletionContextAlias. This alias will be removed in 0.4.0."
)
class BufferedChatCompletionContext(BufferedChatCompletionContextAlias):
pass
@deprecated(
"autogen_core.components.model_context.HeadAndTailChatCompletionContextAlias moved to autogen_core.model_context.HeadAndTailChatCompletionContextAlias. This alias will be removed in 0.4.0."
)
class HeadAndTailChatCompletionContext(HeadAndTailChatCompletionContextAlias):
pass
@deprecated(
"autogen_core.components.model_context.ChatCompletionContextAlias moved to autogen_core.model_context.ChatCompletionContextAlias. This alias will be removed in 0.4.0."
)
class ChatCompletionContext(ChatCompletionContextAlias):
pass
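A minimal sketch of what these backward-compatible aliases are intended to allow: the old import path keeps working but warns. This assumes typing_extensions' deprecated decorator emits a DeprecationWarning at instantiation, and that buffer_size is the constructor parameter (neither is shown in this diff):

import warnings

# Old import path still resolves, but through the deprecated alias classes above.
from autogen_core.components.model_context import BufferedChatCompletionContext

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    context = BufferedChatCompletionContext(buffer_size=5)  # assumed signature; emits DeprecationWarning

assert any(issubclass(w.category, DeprecationWarning) for w in caught)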

View File

@ -1,17 +1,124 @@
from ._model_client import ChatCompletionClient, ModelCapabilities
from ._types import (
AssistantMessage,
ChatCompletionTokenLogprob,
CreateResult,
FinishReasons,
FunctionExecutionResult,
FunctionExecutionResultMessage,
LLMMessage,
RequestUsage,
SystemMessage,
TopLogprob,
UserMessage,
from typing_extensions import deprecated
from ...models import (
AssistantMessage as AssistantMessageAlias,
)
from ...models import ChatCompletionClient as ChatCompletionClientAlias
from ...models import (
ChatCompletionTokenLogprob as ChatCompletionTokenLogprobAlias,
)
from ...models import (
CreateResult as CreateResultAlias,
)
from ...models import (
FinishReasons as FinishReasonsAlias,
)
from ...models import (
FunctionExecutionResult as FunctionExecutionResultAlias,
)
from ...models import (
FunctionExecutionResultMessage as FunctionExecutionResultMessageAlias,
)
from ...models import (
LLMMessage as LLMMessageAlias,
)
from ...models import ModelCapabilities as ModelCapabilitiesAlias
from ...models import (
RequestUsage as RequestUsageAlias,
)
from ...models import (
SystemMessage as SystemMessageAlias,
)
from ...models import (
TopLogprob as TopLogprobAlias,
)
from ...models import (
UserMessage as UserMessageAlias,
)
@deprecated(
"autogen_core.models.ChatCompletionClient moved to autogen_core.models.ChatCompletionClient. This alias will be removed in 0.4.0."
)
class ChatCompletionClient(ChatCompletionClientAlias):
pass
@deprecated(
"autogen_core.models.ModelCapabilities moved to autogen_core.models.ModelCapabilities. This alias will be removed in 0.4.0."
)
class ModelCapabilities(ModelCapabilitiesAlias):
pass
@deprecated(
"autogen_core.models.SystemMessage moved to autogen_core.models.SystemMessage. This alias will be removed in 0.4.0."
)
class SystemMessage(SystemMessageAlias):
pass
@deprecated(
"autogen_core.models.UserMessage moved to autogen_core.models.UserMessage. This alias will be removed in 0.4.0."
)
class UserMessage(UserMessageAlias):
pass
@deprecated(
"autogen_core.models.AssistantMessage moved to autogen_core.models.AssistantMessage. This alias will be removed in 0.4.0."
)
class AssistantMessage(AssistantMessageAlias):
pass
@deprecated(
"autogen_core.models.FunctionExecutionResult moved to autogen_core.models.FunctionExecutionResult. This alias will be removed in 0.4.0."
)
class FunctionExecutionResult(FunctionExecutionResultAlias):
pass
@deprecated(
"autogen_core.models.FunctionExecutionResultMessage moved to autogen_core.models.FunctionExecutionResultMessage. This alias will be removed in 0.4.0."
)
class FunctionExecutionResultMessage(FunctionExecutionResultMessageAlias):
pass
LLMMessage = LLMMessageAlias
@deprecated(
"autogen_core.models.RequestUsage moved to autogen_core.models.RequestUsage. This alias will be removed in 0.4.0."
)
class RequestUsage(RequestUsageAlias):
pass
FinishReasons = FinishReasonsAlias
@deprecated(
"autogen_core.models.CreateResult moved to autogen_core.models.CreateResult. This alias will be removed in 0.4.0."
)
class CreateResult(CreateResultAlias):
pass
@deprecated(
"autogen_core.models.TopLogprob moved to autogen_core.models.TopLogprob. This alias will be removed in 0.4.0."
)
class TopLogprob(TopLogprobAlias):
pass
@deprecated(
"autogen_core.models.ChatCompletionTokenLogprob moved to autogen_core.models.ChatCompletionTokenLogprob. This alias will be removed in 0.4.0."
)
class ChatCompletionTokenLogprob(ChatCompletionTokenLogprobAlias):
pass
__all__ = [
"ModelCapabilities",

View File

@ -2,7 +2,7 @@ import asyncio
from typing import List
from ... import AgentId, AgentRuntime, BaseAgent, CancellationToken, FunctionCall
from ..models import (
from ...models import (
AssistantMessage,
ChatCompletionClient,
FunctionExecutionResult,

View File

@ -3,7 +3,7 @@ from dataclasses import dataclass
from typing import List
from ... import FunctionCall, MessageContext, RoutedAgent, message_handler
from ..models import FunctionExecutionResult
from ...models import FunctionExecutionResult
from ..tools import Tool
__all__ = [

View File

@ -0,0 +1,9 @@
from ._buffered_chat_completion_context import BufferedChatCompletionContext
from ._chat_completion_context import ChatCompletionContext
from ._head_and_tail_chat_completion_context import HeadAndTailChatCompletionContext
__all__ = [
"ChatCompletionContext",
"BufferedChatCompletionContext",
"HeadAndTailChatCompletionContext",
]
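As a quick orientation for the relocated module, a usage sketch of the new import path. The add_message/get_messages coroutine methods and the buffer_size parameter are assumptions about the ChatCompletionContext API, which is not shown in this diff:

import asyncio

from autogen_core.model_context import BufferedChatCompletionContext
from autogen_core.models import AssistantMessage, UserMessage


async def main() -> None:
    # Keep only the most recent messages in the model context (assumed API).
    context = BufferedChatCompletionContext(buffer_size=3)
    await context.add_message(UserMessage(content="What is the capital of France?", source="user"))
    await context.add_message(AssistantMessage(content="Paris.", source="assistant"))
    print(await context.get_messages())


asyncio.run(main())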

View File

@ -1,6 +1,6 @@
from typing import Any, List, Mapping
from ..._types import FunctionCall
from .._types import FunctionCall
from ..models import AssistantMessage, FunctionExecutionResultMessage, LLMMessage, UserMessage
from ._chat_completion_context import ChatCompletionContext

View File

@ -0,0 +1,30 @@
from ._model_client import ChatCompletionClient, ModelCapabilities
from ._types import (
AssistantMessage,
ChatCompletionTokenLogprob,
CreateResult,
FinishReasons,
FunctionExecutionResult,
FunctionExecutionResultMessage,
LLMMessage,
RequestUsage,
SystemMessage,
TopLogprob,
UserMessage,
)
__all__ = [
"ModelCapabilities",
"ChatCompletionClient",
"SystemMessage",
"UserMessage",
"AssistantMessage",
"FunctionExecutionResult",
"FunctionExecutionResultMessage",
"LLMMessage",
"RequestUsage",
"FinishReasons",
"CreateResult",
"TopLogprob",
"ChatCompletionTokenLogprob",
]
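For reference, the relocated message types are constructed exactly as before; a short sketch of the new import path (the example strings are illustrative):

from autogen_core.models import AssistantMessage, SystemMessage, UserMessage

# Message types now import from autogen_core.models rather than
# autogen_core.components.models; construction is unchanged.
messages = [
    SystemMessage(content="You are a helpful assistant."),
    UserMessage(content="What is the capital of France?", source="user"),
    AssistantMessage(content="Paris.", source="assistant"),
]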

View File

@ -11,8 +11,8 @@ from typing_extensions import (
Union,
)
from ... import CancellationToken
from ..tools import Tool, ToolSchema
from .. import CancellationToken
from ..components.tools import Tool, ToolSchema
from ._types import CreateResult, LLMMessage, RequestUsage

View File

@ -4,7 +4,7 @@ from typing import List, Literal, Optional, Union
from pydantic import BaseModel, Field
from typing_extensions import Annotated
from ... import FunctionCall, Image
from .. import FunctionCall, Image
class SystemMessage(BaseModel):

View File

@ -1,8 +1,8 @@
from typing import List
import pytest
from autogen_core.components.model_context import BufferedChatCompletionContext, HeadAndTailChatCompletionContext
from autogen_core.components.models import AssistantMessage, LLMMessage, UserMessage
from autogen_core.model_context import BufferedChatCompletionContext, HeadAndTailChatCompletionContext
from autogen_core.models import AssistantMessage, LLMMessage, UserMessage
@pytest.mark.asyncio

View File

@ -4,7 +4,15 @@ from typing import Any, AsyncGenerator, List, Mapping, Optional, Sequence, Union
import pytest
from autogen_core import AgentId, CancellationToken, FunctionCall, SingleThreadedAgentRuntime
from autogen_core.components.models import (
from autogen_core.components.tool_agent import (
InvalidToolArgumentsException,
ToolAgent,
ToolExecutionException,
ToolNotFoundException,
tool_agent_caller_loop,
)
from autogen_core.components.tools import FunctionTool, Tool, ToolSchema
from autogen_core.models import (
AssistantMessage,
ChatCompletionClient,
CreateResult,
@ -15,14 +23,6 @@ from autogen_core.components.models import (
RequestUsage,
UserMessage,
)
from autogen_core.components.tool_agent import (
InvalidToolArgumentsException,
ToolAgent,
ToolExecutionException,
ToolNotFoundException,
tool_agent_caller_loop,
)
from autogen_core.components.tools import FunctionTool, Tool, ToolSchema
def _pass_function(input: str) -> str:

View File

@ -10,7 +10,7 @@ from autogen_agentchat.messages import (
TextMessage,
)
from autogen_core import CancellationToken, FunctionCall
from autogen_core.components.models import (
from autogen_core.models import (
AssistantMessage,
ChatCompletionClient,
LLMMessage,

View File

@ -1,5 +1,5 @@
from autogen_agentchat.agents import AssistantAgent
from autogen_core.components.models import (
from autogen_core.models import (
ChatCompletionClient,
)

View File

@ -33,8 +33,8 @@ from autogen_agentchat.messages import (
ToolCallResultMessage,
)
from autogen_core import CancellationToken, FunctionCall
from autogen_core.components.models._types import FunctionExecutionResult
from autogen_core.components.tools import FunctionTool, Tool
from autogen_core.models._types import FunctionExecutionResult
_has_openai_dependencies: bool = True
try:

View File

@ -1,8 +1,8 @@
from typing import Any, Awaitable, Callable, List, Optional
from autogen_agentchat.agents import AssistantAgent
from autogen_core.components.models import ChatCompletionClient
from autogen_core.components.tools import Tool
from autogen_core.models import ChatCompletionClient
from .tools import (
extract_audio,

View File

@ -6,7 +6,7 @@ import ffmpeg
import numpy as np
import whisper
from autogen_core import Image as AGImage
from autogen_core.components.models import (
from autogen_core.models import (
ChatCompletionClient,
UserMessage,
)

View File

@ -28,7 +28,7 @@ from autogen_agentchat.base import Response
from autogen_agentchat.messages import ChatMessage, MultiModalMessage, TextMessage
from autogen_core import EVENT_LOGGER_NAME, CancellationToken, FunctionCall
from autogen_core import Image as AGImage
from autogen_core.components.models import (
from autogen_core.models import (
AssistantMessage,
ChatCompletionClient,
LLMMessage,

View File

@ -1,7 +1,7 @@
from typing import Any, Dict, List, TypedDict, Union
from autogen_core import FunctionCall, Image
from autogen_core.components.models import FunctionExecutionResult
from autogen_core.models import FunctionExecutionResult
UserContent = Union[str, List[Union[str, Image]]]
AssistantContent = Union[str, List[FunctionCall]]

View File

@ -1,6 +1,6 @@
from typing import Dict
from autogen_core.components.models import ModelCapabilities
from autogen_core.models import ModelCapabilities
# Based on: https://platform.openai.com/docs/models/continuous-model-upgrades
# This is a moving target, so correctness is checked by the model value returned by openai against expected values at runtime``

View File

@ -28,7 +28,9 @@ from autogen_core import (
FunctionCall,
Image,
)
from autogen_core.components.models import (
from autogen_core.components.tools import Tool, ToolSchema
from autogen_core.logging import LLMCallEvent
from autogen_core.models import (
AssistantMessage,
ChatCompletionClient,
ChatCompletionTokenLogprob,
@ -41,8 +43,6 @@ from autogen_core.components.models import (
TopLogprob,
UserMessage,
)
from autogen_core.components.tools import Tool, ToolSchema
from autogen_core.logging import LLMCallEvent
from openai import AsyncAzureOpenAI, AsyncOpenAI
from openai.types.chat import (
ChatCompletion,
@ -916,7 +916,7 @@ class OpenAIChatCompletionClient(BaseOpenAIChatCompletionClient):
.. code-block:: python
from autogen_ext.models import OpenAIChatCompletionClient
from autogen_core.components.models import UserMessage
from autogen_core.models import UserMessage
openai_client = OpenAIChatCompletionClient(
model="gpt-4o-2024-08-06",

View File

@ -1,6 +1,6 @@
from typing import Awaitable, Callable, Dict, List, Literal, Optional, Union
from autogen_core.components.models import ModelCapabilities
from autogen_core.models import ModelCapabilities
from typing_extensions import Required, TypedDict

View File

@ -4,14 +4,14 @@ import logging
from typing import Any, AsyncGenerator, List, Mapping, Optional, Sequence, Union
from autogen_core import EVENT_LOGGER_NAME, CancellationToken
from autogen_core.components.models import (
from autogen_core.components.tools import Tool, ToolSchema
from autogen_core.models import (
ChatCompletionClient,
CreateResult,
LLMMessage,
ModelCapabilities,
RequestUsage,
)
from autogen_core.components.tools import Tool, ToolSchema
logger = logging.getLogger(EVENT_LOGGER_NAME)
@ -38,7 +38,7 @@ class ReplayChatCompletionClient:
.. code-block:: python
from autogen_ext.models import ReplayChatCompletionClient
from autogen_core.components.models import UserMessage
from autogen_core.models import UserMessage
async def example():
@ -58,7 +58,7 @@ class ReplayChatCompletionClient:
import asyncio
from autogen_ext.models import ReplayChatCompletionClient
from autogen_core.components.models import UserMessage
from autogen_core.models import UserMessage
async def example():
@ -84,7 +84,7 @@ class ReplayChatCompletionClient:
import asyncio
from autogen_ext.models import ReplayChatCompletionClient
from autogen_core.components.models import UserMessage
from autogen_core.models import UserMessage
async def example():

View File

@ -4,7 +4,8 @@ from unittest.mock import MagicMock
import pytest
from autogen_core import CancellationToken, Image
from autogen_core.components.models import (
from autogen_core.components.tools import BaseTool, FunctionTool
from autogen_core.models import (
AssistantMessage,
CreateResult,
FunctionExecutionResult,
@ -14,7 +15,6 @@ from autogen_core.components.models import (
SystemMessage,
UserMessage,
)
from autogen_core.components.tools import BaseTool, FunctionTool
from autogen_ext.models import AzureOpenAIChatCompletionClient, OpenAIChatCompletionClient
from autogen_ext.models._openai._model_info import resolve_model
from autogen_ext.models._openai._openai_client import calculate_vision_tokens, convert_tools

View File

@ -12,7 +12,7 @@ from autogen_core import (
default_subscription,
message_handler,
)
from autogen_core.components.models import ChatCompletionClient, CreateResult, SystemMessage, UserMessage
from autogen_core.models import ChatCompletionClient, CreateResult, SystemMessage, UserMessage
from autogen_ext.models import ReplayChatCompletionClient

View File

@ -20,7 +20,7 @@ from autogen_magentic_one.agents.orchestrator import LedgerOrchestrator
from autogen_magentic_one.agents.user_proxy import UserProxy
from autogen_magentic_one.messages import BroadcastMessage
from autogen_magentic_one.utils import LogHandler, create_completion_client_from_env
from autogen_core.components.models import UserMessage
from autogen_core.models import UserMessage
from threading import Lock

View File

@ -3,7 +3,7 @@ import time
from typing import List, Optional
from autogen_core import EVENT_LOGGER_NAME, AgentProxy, CancellationToken, MessageContext
from autogen_core.components.models import AssistantMessage, LLMMessage, UserMessage
from autogen_core.models import AssistantMessage, LLMMessage, UserMessage
from ..messages import BroadcastMessage, OrchestrationEvent, RequestReplyMessage, ResetMessage
from ..utils import message_content_to_str

View File

@ -1,7 +1,7 @@
from typing import List, Tuple
from autogen_core import CancellationToken, MessageContext, TopicId
from autogen_core.components.models import (
from autogen_core.models import (
AssistantMessage,
LLMMessage,
UserMessage,

View File

@ -3,7 +3,7 @@ from typing import Awaitable, Callable, List, Literal, Tuple, Union
from autogen_core import CancellationToken, default_subscription
from autogen_core.code_executor import CodeBlock, CodeExecutor
from autogen_core.components.models import (
from autogen_core.models import (
ChatCompletionClient,
SystemMessage,
UserMessage,

View File

@ -3,7 +3,7 @@ import time
from typing import List, Optional, Tuple
from autogen_core import CancellationToken, FunctionCall, default_subscription
from autogen_core.components.models import (
from autogen_core.models import (
ChatCompletionClient,
SystemMessage,
UserMessage,

View File

@ -14,7 +14,7 @@ from urllib.parse import quote_plus # parse_qs, quote, unquote, urlparse, urlun
import aiofiles
from autogen_core import EVENT_LOGGER_NAME, CancellationToken, FunctionCall, default_subscription
from autogen_core import Image as AGImage
from autogen_core.components.models import (
from autogen_core.models import (
AssistantMessage,
ChatCompletionClient,
LLMMessage,

View File

@ -2,7 +2,7 @@ import json
from typing import Any, Dict, List, Optional
from autogen_core import AgentProxy, CancellationToken, MessageContext, TopicId, default_subscription
from autogen_core.components.models import (
from autogen_core.models import (
AssistantMessage,
ChatCompletionClient,
LLMMessage,

View File

@ -2,7 +2,7 @@ from dataclasses import dataclass
from typing import Any, Dict, List, Union
from autogen_core import FunctionCall, Image
from autogen_core.components.models import FunctionExecutionResult, LLMMessage
from autogen_core.models import FunctionExecutionResult, LLMMessage
from pydantic import BaseModel
# Convenience type

View File

@ -6,11 +6,11 @@ from datetime import datetime
from typing import Any, Dict, List, Literal
from autogen_core import Image
from autogen_core.components.models import (
from autogen_core.logging import LLMCallEvent
from autogen_core.models import (
ChatCompletionClient,
ModelCapabilities,
)
from autogen_core.logging import LLMCallEvent
from autogen_ext.models import AzureOpenAIChatCompletionClient, OpenAIChatCompletionClient
from .messages import (

View File

@ -9,10 +9,10 @@ from typing import Mapping
import pytest
from autogen_core import AgentId, AgentProxy, FunctionCall, SingleThreadedAgentRuntime
from autogen_core.components.models import (
from autogen_core.components.tools._base import ToolSchema
from autogen_core.models import (
UserMessage,
)
from autogen_core.components.tools._base import ToolSchema
from autogen_magentic_one.agents.multimodal_web_surfer import MultimodalWebSurfer
from autogen_magentic_one.agents.multimodal_web_surfer.tool_definitions import (
TOOL_PAGE_DOWN,

View File

@ -8,11 +8,11 @@ import aiofiles
import yaml
from autogen_agentchat.agents import AssistantAgent, UserProxyAgent
from autogen_agentchat.conditions import MaxMessageTermination, StopMessageTermination, TextMentionTermination
from autogen_agentchat.teams import RoundRobinGroupChat, SelectorGroupChat, MagenticOneGroupChat
from autogen_agentchat.teams import MagenticOneGroupChat, RoundRobinGroupChat, SelectorGroupChat
from autogen_core.components.tools import FunctionTool
from autogen_ext.agents.web_surfer import MultimodalWebSurfer
from autogen_ext.agents.file_surfer import FileSurfer
from autogen_ext.agents.magentic_one import MagenticOneCoderAgent
from autogen_ext.agents.web_surfer import MultimodalWebSurfer
from autogen_ext.models import OpenAIChatCompletionClient
from ..datamodel.types import (