Mirror of https://github.com/microsoft/autogen.git (synced 2025-12-26 22:48:40 +00:00)
M1 docker (#5437)
Presently, MagenticOne and the m1 CLI use the LocalCommandLineCodeExecutor (presumably copied from the agbench code, which already runs in Docker). This PR defaults m1 to Docker and adds a code_executor parameter to MagenticOne, which defaults to the local executor for now to maintain backward compatibility, but that behavior is immediately deprecated.
parent 5fcb3b8061
commit f20ba9127d
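To illustrate the new default, here is a minimal sketch of how a caller can now run MagenticOne with the Docker-based executor. This is not code from the PR; the OpenAIChatCompletionClient and the gpt-4o model name are assumptions, and a local Docker daemon must be running.

import asyncio

from autogen_agentchat.ui import Console
from autogen_ext.code_executors.docker import DockerCommandLineCodeExecutor
from autogen_ext.models.openai import OpenAIChatCompletionClient
from autogen_ext.teams.magentic_one import MagenticOne


async def main() -> None:
    # Assumed model client; swap in whatever ChatCompletionClient you use.
    client = OpenAIChatCompletionClient(model="gpt-4o")
    # Entering the context starts the container; exiting stops it.
    async with DockerCommandLineCodeExecutor(work_dir=".") as code_executor:
        m1 = MagenticOne(client=client, code_executor=code_executor)
        await Console(m1.run_stream(task="Write and run a hello-world script."))


if __name__ == "__main__":
    asyncio.run(main())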
@@ -5,6 +5,7 @@ from autogen_agentchat.agents import CodeExecutorAgent, UserProxyAgent
 from autogen_agentchat.base import ChatAgent
 from autogen_agentchat.teams import MagenticOneGroupChat
 from autogen_core import CancellationToken
+from autogen_core.code_executor import CodeExecutor
 from autogen_core.models import ChatCompletionClient

 from autogen_ext.agents.file_surfer import FileSurfer
@@ -126,14 +127,24 @@ class MagenticOne(MagenticOneGroupChat):
         client: ChatCompletionClient,
         hil_mode: bool = False,
         input_func: InputFuncType | None = None,
+        code_executor: CodeExecutor | None = None,
     ):
         self.client = client
         self._validate_client_capabilities(client)
+
+        if code_executor is None:
+            warnings.warn(
+                "Instantiating MagenticOne without a code_executor is deprecated. Provide a code_executor to clear this warning (e.g., code_executor=LocalCommandLineCodeExecutor() ).",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            code_executor = LocalCommandLineCodeExecutor()
+
         fs = FileSurfer("FileSurfer", model_client=client)
         ws = MultimodalWebSurfer("WebSurfer", model_client=client)
         coder = MagenticOneCoderAgent("Coder", model_client=client)
-        executor = CodeExecutorAgent("Executor", code_executor=LocalCommandLineCodeExecutor())
+        executor = CodeExecutorAgent("ComputerTerminal", code_executor=code_executor)

         agents: List[ChatAgent] = [fs, ws, coder, executor]
         if hil_mode:
             user_proxy = UserProxyAgent("User", input_func=input_func)
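For existing callers, the old constructor still works but now emits a DeprecationWarning and falls back to a local, non-Docker executor. A short sketch of both paths follows; it is not from the PR, and the model client and the LocalCommandLineCodeExecutor import path are assumptions based on the rest of autogen-ext.

import warnings

from autogen_ext.code_executors.local import LocalCommandLineCodeExecutor
from autogen_ext.models.openai import OpenAIChatCompletionClient
from autogen_ext.teams.magentic_one import MagenticOne

client = OpenAIChatCompletionClient(model="gpt-4o")  # assumed client

# Legacy path: no code_executor -> DeprecationWarning plus local fallback.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    m1_legacy = MagenticOne(client=client)
assert any(issubclass(w.category, DeprecationWarning) for w in caught)

# New path: pass an executor explicitly to clear the warning.
m1 = MagenticOne(client=client, code_executor=LocalCommandLineCodeExecutor())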
@@ -17,7 +17,7 @@ classifiers = [
 dependencies = [
     "pyyaml>=5.1",
     "autogen-agentchat>=0.4.4,<0.5",
-    "autogen-ext[openai,magentic-one,rich]>=0.4.4,<0.5",
+    "autogen-ext[docker,openai,magentic-one,rich]>=0.4.4,<0.5",
 ]

 [project.scripts]
@@ -9,6 +9,7 @@ import yaml
 from autogen_agentchat.ui import Console, UserInputManager
 from autogen_core import CancellationToken
 from autogen_core.models import ChatCompletionClient
+from autogen_ext.code_executors.docker import DockerCommandLineCodeExecutor
 from autogen_ext.teams.magentic_one import MagenticOne
 from autogen_ext.ui import RichConsole

@@ -113,12 +114,19 @@ def main() -> None:
     # Run the task
     async def run_task(task: str, hil_mode: bool, use_rich_console: bool) -> None:
         input_manager = UserInputManager(callback=cancellable_input)
-        m1 = MagenticOne(client=client, hil_mode=hil_mode, input_func=input_manager.get_wrapped_callback())
-
-        if use_rich_console:
-            await RichConsole(m1.run_stream(task=task), output_stats=False, user_input_manager=input_manager)
-        else:
-            await Console(m1.run_stream(task=task), output_stats=False, user_input_manager=input_manager)
+        async with DockerCommandLineCodeExecutor(work_dir=os.getcwd()) as code_executor:
+            m1 = MagenticOne(
+                client=client,
+                hil_mode=hil_mode,
+                input_func=input_manager.get_wrapped_callback(),
+                code_executor=code_executor,
+            )
+
+            if use_rich_console:
+                await RichConsole(m1.run_stream(task=task), output_stats=False, user_input_manager=input_manager)
+            else:
+                await Console(m1.run_stream(task=task), output_stats=False, user_input_manager=input_manager)

     task = args.task if isinstance(args.task, str) else args.task[0]
     asyncio.run(run_task(task, not args.no_hil, args.rich))
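For reference, a standalone sketch of the executor lifecycle the CLI now depends on: entering the async context starts the container, code blocks run inside it against the shared work_dir, and exiting tears the container down. The CodeBlock/execute_code_blocks usage below reflects the autogen-core CodeExecutor interface as I understand it and is an illustration, not CLI code.

import asyncio

from autogen_core import CancellationToken
from autogen_core.code_executor import CodeBlock
from autogen_ext.code_executors.docker import DockerCommandLineCodeExecutor


async def demo() -> None:
    # work_dir is shared with the container, so files written by executed
    # code land in the current directory, mirroring the CLI's os.getcwd().
    async with DockerCommandLineCodeExecutor(work_dir=".") as executor:
        result = await executor.execute_code_blocks(
            [CodeBlock(language="python", code="print('hello from docker')")],
            cancellation_token=CancellationToken(),
        )
        print(result.output)


asyncio.run(demo())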
python/uv.lock (generated, 4 changed lines)
@@ -3647,7 +3647,7 @@ version = "0.2.3"
 source = { editable = "packages/magentic-one-cli" }
 dependencies = [
     { name = "autogen-agentchat" },
-    { name = "autogen-ext", extra = ["magentic-one", "openai", "rich"] },
+    { name = "autogen-ext", extra = ["docker", "magentic-one", "openai", "rich"] },
     { name = "pyyaml" },
 ]
@@ -3659,7 +3659,7 @@ dev = [
 [package.metadata]
 requires-dist = [
     { name = "autogen-agentchat", editable = "packages/autogen-agentchat" },
-    { name = "autogen-ext", extras = ["openai", "magentic-one", "rich"], editable = "packages/autogen-ext" },
+    { name = "autogen-ext", extras = ["docker", "openai", "magentic-one", "rich"], editable = "packages/autogen-ext" },
     { name = "pyyaml", specifier = ">=5.1" },
 ]