mirror of https://github.com/microsoft/autogen.git (synced 2025-10-31 17:59:50 +00:00)

Commit 95bd514a9a
* add initial global search draft
* add graphrag dep
* fix local search embedding
* linting
* add from config constructor
* remove draft notebook
* update config factory and add docstrings
* add graphrag sample
* add sample prompts
* update readme
* update deps
* Add API docs
* Update python/samples/agentchat_graphrag/requirements.txt
* Update python/samples/agentchat_graphrag/requirements.txt
* update docstrings with snippet and doc ref
* lint
* improve set up instructions in docstring
* lint
* update lock
* Update python/packages/autogen-ext/src/autogen_ext/tools/graphrag/_global_search.py
  Co-authored-by: Eric Zhu <ekzhu@users.noreply.github.com>
* Update python/packages/autogen-ext/src/autogen_ext/tools/graphrag/_local_search.py
  Co-authored-by: Eric Zhu <ekzhu@users.noreply.github.com>
* add unit tests
* update lock
* update uv lock
* add docstring newlines
* stubs and typing on graphrag tests
* fix docstrings
* fix mypy error
* + linting and type fixes
* type fix graphrag sample
* Update python/packages/autogen-ext/src/autogen_ext/tools/graphrag/_global_search.py
  Co-authored-by: Eric Zhu <ekzhu@users.noreply.github.com>
* Update python/packages/autogen-ext/src/autogen_ext/tools/graphrag/_local_search.py
  Co-authored-by: Eric Zhu <ekzhu@users.noreply.github.com>
* Update python/samples/agentchat_graphrag/requirements.txt
  Co-authored-by: Eric Zhu <ekzhu@users.noreply.github.com>
* update overrides
* fix docstring client imports
* additional docstring fix
* add docstring missing import
* use openai and fix db path
* use console for displaying messages
* add model config and gitignore
* update readme
* lint
* Update python/samples/agentchat_graphrag/README.md
* Update python/samples/agentchat_graphrag/README.md
* Comment remaining azure config

---------

Co-authored-by: Leonardo Pinheiro <lpinheiro@microsoft.com>
Co-authored-by: Eric Zhu <ekzhu@users.noreply.github.com>
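At its core, the change introduces two tools, GlobalSearchTool and LocalSearchTool, each built from an existing GraphRAG settings.yaml via a from_settings() constructor. A minimal sketch of that usage, mirroring the full sample file shown below, is:

from autogen_ext.tools.graphrag import GlobalSearchTool, LocalSearchTool

# Construct both search tools from a pre-built GraphRAG index configuration;
# the settings.yaml path matches the one used in the sample below.
global_tool = GlobalSearchTool.from_settings(settings_path="./settings.yaml")
local_tool = LocalSearchTool.from_settings(settings_path="./settings.yaml")

The sample then hands both tools to an AssistantAgent, which picks local or global search per query.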
		
			
				
	
	
		
67 lines · 2.4 KiB · Python
			
		
		
	
	
import argparse
import asyncio
import json
import logging
from typing import Any, Dict

from autogen_agentchat.ui import Console
from autogen_ext.tools.graphrag import (
    GlobalSearchTool,
    LocalSearchTool,
)
from autogen_agentchat.agents import AssistantAgent
from autogen_core.models import ChatCompletionClient


async def main(model_config: Dict[str, Any]) -> None:
    # Initialize the model client from config
    model_client = ChatCompletionClient.load_component(model_config)

    # Set up global search tool
    global_tool = GlobalSearchTool.from_settings(
        settings_path="./settings.yaml"
    )

    # Set up local search tool
    local_tool = LocalSearchTool.from_settings(
        settings_path="./settings.yaml"
    )

    # Create assistant agent with both search tools
    assistant_agent = AssistantAgent(
        name="search_assistant",
        tools=[global_tool, local_tool],
        model_client=model_client,
        system_message=(
            "You are a tool selector AI assistant using the GraphRAG framework. "
            "Your primary task is to determine the appropriate search tool to call based on the user's query. "
            "For specific, detailed information about particular entities or relationships, call the 'local_search' function. "
            "For broader, abstract questions requiring a comprehensive understanding of the dataset, call the 'global_search' function. "
            "Do not attempt to answer the query directly; focus solely on selecting and calling the correct function."
        )
    )

    # Run a sample query and stream the agent's response to the console
    query = "What does the station-master says about Dr. Becher?"
    print(f"\nQuery: {query}")

    await Console(assistant_agent.run_stream(task=query))


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Run a GraphRAG search with an agent.")
    parser.add_argument("--verbose", action="store_true", help="Enable verbose logging.")
    parser.add_argument(
        "--model-config", type=str, help="Path to the model configuration file.", default="model_config.json"
    )
    args = parser.parse_args()
    if args.verbose:
        logging.basicConfig(level=logging.WARNING)
        logging.getLogger("autogen_core").setLevel(logging.DEBUG)
        handler = logging.FileHandler("graphrag_search.log")
        logging.getLogger("autogen_core").addHandler(handler)

    with open(args.model_config, "r") as f:
        model_config = json.load(f)
    asyncio.run(main(model_config))
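The script reads a model_config.json (path settable via --model-config) and passes it to ChatCompletionClient.load_component. The file's contents are not shown in this commit; as an illustration only, a minimal OpenAI-based config might be generated as below, where the provider path and fields are assumptions based on autogen's component config format rather than something taken from this commit:

import json

# Hypothetical model_config.json for an OpenAI client (assumed shape);
# the API key is expected to come from the OPENAI_API_KEY environment variable.
model_config = {
    "provider": "autogen_ext.models.openai.OpenAIChatCompletionClient",
    "config": {"model": "gpt-4o"},
}

with open("model_config.json", "w") as f:
    json.dump(model_config, f, indent=2)

Running the sample also assumes a GraphRAG index and settings.yaml in the working directory, as referenced by the from_settings() calls above.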