mirror of
https://github.com/microsoft/autogen.git
synced 2025-12-31 09:11:23 +00:00
<!-- Thank you for your contribution! Please review https://microsoft.github.io/autogen/docs/Contribute before opening a pull request. --> <!-- Please add a reviewer to the assignee section when you create a PR. If you don't have the access to it, we will shortly find a reviewer and assign them to your PR. --> ## Why are these changes needed? <!-- Please give a short summary of the change and the problem this solves. --> Shows an example of how to use the `Memory` interface to implement a just-in-time vector memory based on chromadb. ```python import os from pathlib import Path from autogen_agentchat.agents import AssistantAgent from autogen_agentchat.ui import Console from autogen_core.memory import MemoryContent, MemoryMimeType from autogen_ext.memory.chromadb import ChromaDBVectorMemory, PersistentChromaDBVectorMemoryConfig from autogen_ext.models.openai import OpenAIChatCompletionClient # Initialize ChromaDB memory with custom config chroma_user_memory = ChromaDBVectorMemory( config=PersistentChromaDBVectorMemoryConfig( collection_name="preferences", persistence_path=os.path.join(str(Path.home()), ".chromadb_autogen"), k=2, # Return top k results score_threshold=0.4, # Minimum similarity score ) ) # a HttpChromaDBVectorMemoryConfig is also supported for connecting to a remote ChromaDB server # Add user preferences to memory await chroma_user_memory.add( MemoryContent( content="The weather should be in metric units", mime_type=MemoryMimeType.TEXT, metadata={"category": "preferences", "type": "units"}, ) ) await chroma_user_memory.add( MemoryContent( content="Meal recipe must be vegan", mime_type=MemoryMimeType.TEXT, metadata={"category": "preferences", "type": "dietary"}, ) ) # Create assistant agent with ChromaDB memory assistant_agent = AssistantAgent( name="assistant_agent", model_client=OpenAIChatCompletionClient( model="gpt-4o", ), tools=[get_weather], memory=[chroma_user_memory], ) stream = assistant_agent.run_stream(task="What is the weather in New York?") await 
Console(stream) await chroma_user_memory.close() ``` ```txt ---------- user ---------- What is the weather in New York? ---------- assistant_agent ---------- [MemoryContent(content='The weather should be in metric units', mime_type='MemoryMimeType.TEXT', metadata={'category': 'preferences', 'mime_type': 'MemoryMimeType.TEXT', 'type': 'units', 'score': 0.4342913043162201, 'id': '8a8d683c-5866-41e1-ac17-08c4fda6da86'}), MemoryContent(content='The weather should be in metric units', mime_type='MemoryMimeType.TEXT', metadata={'category': 'preferences', 'mime_type': 'MemoryMimeType.TEXT', 'type': 'units', 'score': 0.4342913043162201, 'id': 'f27af42c-cb63-46f0-b26b-ffcc09955ca1'})] ---------- assistant_agent ---------- [FunctionCall(id='call_a8U3YEj2dxA065vyzdfXDtNf', arguments='{"city":"New York","units":"metric"}', name='get_weather')] ---------- assistant_agent ---------- [FunctionExecutionResult(content='The weather in New York is 23 °C and Sunny.', call_id='call_a8U3YEj2dxA065vyzdfXDtNf', is_error=False)] ---------- assistant_agent ---------- The weather in New York is 23 °C and Sunny. ``` Note that the MemoryContent objects in the MemoryQuery events have useful metadata like the score and id of the retrieved memories. ## Related issue number <!-- For example: "Closes #1234" --> ## Checks - [ ] I've included any doc changes needed for https://microsoft.github.io/autogen/. See https://microsoft.github.io/autogen/docs/Contribute#documentation to build and test documentation locally. - [ ] I've added tests (if relevant) corresponding to the changes introduced in this PR. - [ ] I've made sure all auto checks have passed.
175 lines
3.5 KiB
TOML
175 lines
3.5 KiB
TOML
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[project]
name = "autogen-ext"
version = "0.4.8"
license = { file = "LICENSE-CODE" }
description = "AutoGen extensions library"
readme = "README.md"
requires-python = ">=3.10"
classifiers = [
    "Programming Language :: Python :: 3",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
]
# Core runtime dependency is pinned to the matching autogen-core release;
# everything else is opt-in via [project.optional-dependencies].
dependencies = [
    "autogen-core==0.4.8",
]
# Feature extras. Extras that ship agent implementations pin
# autogen-agentchat to the same release as this package.
[project.optional-dependencies]
langchain = ["langchain_core~=0.3.3"]
azure = [
    "azure-ai-inference>=1.0.0b7",
    "azure-core",
    "azure-identity",
]
docker = ["docker~=7.0", "asyncio_atexit>=1.0.1"]
ollama = ["ollama>=0.4.7", "tiktoken>=0.8.0"]
openai = ["openai>=1.52.2", "tiktoken>=0.8.0", "aiofiles"]
file-surfer = [
    "autogen-agentchat==0.4.8",
    "markitdown>=0.0.1a2",
]
graphrag = ["graphrag>=1.0.1"]
chromadb = ["chromadb"]
web-surfer = [
    "autogen-agentchat==0.4.8",
    "playwright>=1.48.0",
    "pillow>=11.0.0",
    "markitdown>=0.0.1a2",
]
magentic-one = [
    "autogen-agentchat==0.4.8",
    "markitdown>=0.0.1a2",
    "playwright>=1.48.0",
    "pillow>=11.0.0",
]
video-surfer = [
    "autogen-agentchat==0.4.8",
    "opencv-python>=4.5",
    "ffmpeg-python",
    "openai-whisper",
]
diskcache = [
    "diskcache>=5.6.3",
]
redis = [
    "redis>=5.2.1",
]
grpc = [
    "grpcio~=1.70.0",
]
jupyter-executor = [
    "ipykernel>=6.29.5",
    "nbclient>=0.10.2",
]
semantic-kernel-core = [
    "semantic-kernel>=1.17.1",
]
gemini = [
    "google-genai>=1.0.0",
]
semantic-kernel-google = [
    "semantic-kernel[google]>=1.17.1",
]
semantic-kernel-hugging-face = [
    "semantic-kernel[hugging_face]>=1.17.1",
]
semantic-kernel-mistralai = [
    "semantic-kernel[mistralai]>=1.17.1",
]
semantic-kernel-ollama = [
    "semantic-kernel[ollama]>=1.17.1",
]
semantic-kernel-onnx = [
    "semantic-kernel[onnx]>=1.17.1",
]
semantic-kernel-anthropic = [
    "semantic-kernel[anthropic]>=1.17.1",
]
semantic-kernel-pandas = [
    "semantic-kernel[pandas]>=1.17.1",
]
semantic-kernel-aws = [
    "semantic-kernel[aws]>=1.17.1",
]
semantic-kernel-dapr = [
    "semantic-kernel[dapr]>=1.17.1",
]
http-tool = [
    "httpx>=0.27.0",
    "json-schema-to-pydantic>=0.2.0",
]
semantic-kernel-all = [
    "semantic-kernel[google,hugging_face,mistralai,ollama,onnx,anthropic,usearch,pandas,aws,dapr]>=1.17.1",
]
rich = ["rich>=13.9.4"]
mcp = [
    "mcp>=1.1.3",
    "json-schema-to-pydantic>=0.2.2",
]
[tool.hatch.build.targets.wheel]
packages = ["src/autogen_ext"]
# PEP 735 dependency groups (development-only, not published with the wheel).
[dependency-groups]
dev = [
    "autogen_test_utils",
    "langchain-experimental",
    "pandas-stubs>=2.2.3.241126",
    "httpx>=0.28.1",
]
[tool.ruff]
# Inherit the monorepo-wide lint configuration.
extend = "../../pyproject.toml"
include = ["src/**", "tests/*.py"]
# Generated protobuf stubs and vendored JS are not lint targets.
exclude = ["src/autogen_ext/agents/web_surfer/*.js", "src/autogen_ext/runtimes/grpc/protos", "tests/protos"]
[tool.pyright]
# Inherit the monorepo-wide type-checking configuration.
extends = "../../pyproject.toml"
include = ["src", "tests"]
# Generated protobuf stubs are not type-checked.
exclude = ["src/autogen_ext/runtimes/grpc/protos", "tests/protos"]
[tool.pytest.ini_options]
minversion = "6.0"
testpaths = ["tests"]
markers = [
    "grpc",
]
[tool.poe]
include = "../../shared_tasks.toml"
[tool.poe.tasks]
# Browsers must be installed before the web-surfer tests can run.
test.sequence = [
    "playwright install",
    "pytest -n 1 --cov=src --cov-report=term-missing --cov-report=xml",
]
test.default_item_type = "cmd"
test-grpc = "pytest -n 1 --cov=src --cov-report=term-missing --cov-report=xml --grpc"
mypy = "mypy --config-file ../../pyproject.toml --exclude src/autogen_ext/runtimes/grpc/protos --exclude tests/protos src tests"
# NOTE(review): the empty [tool.mypy] header is kept deliberately —
# presumably so mypy picks up its config from this pyproject; verify
# before removing.
[tool.mypy]

[[tool.mypy.overrides]]
module = "docker.*"
ignore_missing_imports = true