mirror of https://github.com/microsoft/autogen.git
synced 2025-11-04 03:39:52 +00:00
	Add basic notebook for gptassistant (#636)
* Refactor GPTAssistantAgent constructor to handle instructions and overwrite_instructions flag
  - Ensure that `system_message` is always consistent with `instructions`
  - Ensure provided instructions are always used
  - Add option to permanently modify the instructions of the assistant
* Improve default behavior
* Add a test; add method to delete assistant
* Add a new test for overwriting instructions
* Add test case for when no instructions are given for existing assistant
* Add pytest markers to test_gpt_assistant.py
* add test in workflow
* update
* fix test_client_stream
* comment out test_hierarchy_
* Add basic gptassistant notebook - also improve logging in gpt assistant
* Update notebook/agentchat_oai_assistant_twoagents_basic.ipynb

Co-authored-by: Qingyun Wu <qingyun.wu@psu.edu>

---------

Co-authored-by: Chi Wang <wang.chi@microsoft.com>
Co-authored-by: kevin666aa <yrwu000627@gmail.com>
Co-authored-by: Qingyun Wu <qingyun.wu@psu.edu>
This commit is contained in:

parent c22a322aff
commit 032c05e954
@@ -52,6 +52,7 @@ class GPTAssistantAgent(ConversableAgent):
        self._openai_client = oai_wrapper._clients[0]
        openai_assistant_id = llm_config.get("assistant_id", None)
        if openai_assistant_id is None:
            logger.warning("assistant_id was None, creating a new assistant")
            # create a new assistant
            if instructions is None:
                logger.warning(
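The hunk above shows only the start of the branch that creates a new assistant when `llm_config` carries no `assistant_id`. For orientation, here is a minimal sketch of what creating an assistant through the OpenAI Python client looks like; the name, instructions, and model below are placeholders, and the exact arguments GPTAssistantAgent passes are not visible in this hunk.

```python
# Illustrative sketch only, not the exact call made by GPTAssistantAgent.
# Requires the openai v1 SDK; OpenAI() reads OPENAI_API_KEY from the environment.
from openai import OpenAI

client = OpenAI()

assistant = client.beta.assistants.create(
    name="assistant",                                # placeholder name
    instructions="You are a helpful AI assistant.",  # placeholder instructions
    model="gpt-4-1106-preview",                      # placeholder model
)
print(assistant.id)  # this id can later be passed as llm_config["assistant_id"]
```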
@@ -346,4 +347,5 @@ class GPTAssistantAgent(ConversableAgent):

    def delete_assistant(self):
        """Delete the assistant from OAI assistant API"""
        logger.warning("Permanently deleting assistant...")
        self._openai_client.beta.assistants.delete(self.assistant_id)
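The commit message also describes an option to permanently overwrite an existing assistant's instructions, alongside the new `delete_assistant` method shown above. A hedged usage sketch follows: treating `overwrite_instructions` as a constructor keyword is an assumption based on the commit message (the signature is not shown in these hunks), and `config_list` / `existing_assistant_id` are assumed to be defined as in the notebook below.

```python
# Sketch under assumptions: the overwrite_instructions keyword is inferred from the
# commit message; delete_assistant and the other parameters are confirmed by the
# hunks above and the notebook below.
from autogen.agentchat.contrib.gpt_assistant_agent import GPTAssistantAgent

gpt_assistant = GPTAssistantAgent(
    name="assistant",
    instructions="Reply concisely and end with TERMINATE when the task is done.",
    llm_config={"config_list": config_list, "assistant_id": existing_assistant_id},
    overwrite_instructions=True,  # assumed flag: permanently replace the remote assistant's instructions
)

# Permanently remove the backing assistant from the OpenAI Assistants API when done
# (wraps the beta.assistants.delete call shown in the hunk above).
gpt_assistant.delete_assistant()
```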
							
								
								
									
notebook/agentchat_oai_assistant_twoagents_basic.ipynb (new file, 214 lines)
@@ -0,0 +1,214 @@
{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## OpenAI Assistants in AutoGen\n",
    "\n",
    "This notebook shows a very basic example of the [`GPTAssistantAgent`](https://github.com/microsoft/autogen/blob/main/autogen/agentchat/contrib/gpt_assistant_agent.py#L16C43-L16C43), which is an experimental AutoGen agent class that leverages the [OpenAI Assistant API](https://platform.openai.com/docs/assistants/overview) for conversational capabilities,  working with\n",
    "`UserProxyAgent` in AutoGen."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "assistant_id was None, creating a new assistant\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\u001b[33muser_proxy\u001b[0m (to assistant):\n",
      "\n",
      "Print hello world\n",
      "\n",
      "--------------------------------------------------------------------------------\n",
      "\u001b[33massistant\u001b[0m (to user_proxy):\n",
      "\n",
      "```python\n",
      "print(\"Hello, World!\")\n",
      "```\n",
      "\n",
      "Please run this Python code to print \"Hello, World!\" to the console.\n",
      "\n",
      "\n",
      "--------------------------------------------------------------------------------\n",
      "\u001b[31m\n",
      ">>>>>>>> EXECUTING CODE BLOCK 0 (inferred language is python)...\u001b[0m\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "execute_code was called without specifying a value for use_docker. Since the python docker package is not available, code will be run natively. Note: this fallback behavior is subject to change\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\u001b[33muser_proxy\u001b[0m (to assistant):\n",
      "\n",
      "exitcode: 0 (execution succeeded)\n",
      "Code output: \n",
      "Hello, World!\n",
      "\n",
      "\n",
      "--------------------------------------------------------------------------------\n",
      "\u001b[33massistant\u001b[0m (to user_proxy):\n",
      "\n",
      "The code executed successfully and printed \"Hello, World!\" as expected.\n",
      "\n",
      "TERMINATE\n",
      "\n",
      "\n",
      "--------------------------------------------------------------------------------\n"
     ]
    }
   ],
   "source": [
    "import logging\n",
    "import os\n",
    " \n",
    "from autogen import config_list_from_json\n",
    "from autogen import AssistantAgent\n",
    "from autogen.agentchat.contrib.gpt_assistant_agent import GPTAssistantAgent\n",
    "from autogen import UserProxyAgent\n",
    "\n",
    "logger = logging.getLogger(__name__)\n",
    "logger.setLevel(logging.WARNING)\n",
    "\n",
    "assistant_id = os.environ.get(\"ASSISTANT_ID\", None)\n",
    "\n",
    "config_list = config_list_from_json(\"OAI_CONFIG_LIST\")\n",
    "llm_config = {\n",
    "    \"config_list\": config_list,\n",
    "    \"assistant_id\": assistant_id\n",
    "}\n",
    "\n",
    "gpt_assistant = GPTAssistantAgent(name=\"assistant\",\n",
    "                                instructions=AssistantAgent.DEFAULT_SYSTEM_MESSAGE,\n",
    "                                llm_config=llm_config)\n",
    "\n",
    "user_proxy = UserProxyAgent(name=\"user_proxy\",\n",
    "    code_execution_config={\n",
    "        \"work_dir\": \"coding\"\n",
    "    },\n",
    "    is_termination_msg=lambda msg: \"TERMINATE\" in msg[\"content\"],\n",
    "    human_input_mode=\"NEVER\",\n",
    "    max_consecutive_auto_reply=1)\n",
    "user_proxy.initiate_chat(gpt_assistant, message=\"Print hello world\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\u001b[33muser_proxy\u001b[0m (to assistant):\n",
      "\n",
      "Write py code to eval 2 + 2\n",
      "\n",
      "--------------------------------------------------------------------------------\n",
      "\u001b[33massistant\u001b[0m (to user_proxy):\n",
      "\n",
      "```python\n",
      "# Let's write a simple Python code to evaluate 2 + 2 and print the result.\n",
      "\n",
      "result = 2 + 2\n",
      "print(result)\n",
      "```\n",
      "\n",
      "\n",
      "--------------------------------------------------------------------------------\n",
      "\u001b[31m\n",
      ">>>>>>>> EXECUTING CODE BLOCK 0 (inferred language is python)...\u001b[0m\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "execute_code was called without specifying a value for use_docker. Since the python docker package is not available, code will be run natively. Note: this fallback behavior is subject to change\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\u001b[33muser_proxy\u001b[0m (to assistant):\n",
      "\n",
      "exitcode: 0 (execution succeeded)\n",
      "Code output: \n",
      "4\n",
      "\n",
      "\n",
      "--------------------------------------------------------------------------------\n",
      "\u001b[33massistant\u001b[0m (to user_proxy):\n",
      "\n",
      "The Python code was executed successfully and the result of evaluating 2 + 2 is 4.\n",
      "\n",
      "TERMINATE\n",
      "\n",
      "\n",
      "--------------------------------------------------------------------------------\n"
     ]
    }
   ],
   "source": [
    "user_proxy.initiate_chat(gpt_assistant, message=\"Write py code to eval 2 + 2\", clear_history=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Permanently deleting assistant...\n"
     ]
    }
   ],
   "source": [
    "gpt_assistant.delete_assistant()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.12"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
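The notebook ends by deleting the assistant, but its first cell reads `ASSISTANT_ID` from the environment, so an assistant created in one run can instead be reused in later sessions. A minimal sketch of that pattern, assuming the same `OAI_CONFIG_LIST` setup as in the notebook:

```python
# Sketch: reuse an existing assistant across sessions instead of deleting it.
# Assumes OAI_CONFIG_LIST is configured as in the notebook above.
import os

from autogen import config_list_from_json
from autogen.agentchat.contrib.gpt_assistant_agent import GPTAssistantAgent

config_list = config_list_from_json("OAI_CONFIG_LIST")
llm_config = {
    "config_list": config_list,
    # Set ASSISTANT_ID to the id of a previously created assistant to reuse it;
    # leave it unset to create a new one, as in the notebook's first cell.
    "assistant_id": os.environ.get("ASSISTANT_ID", None),
}

gpt_assistant = GPTAssistantAgent(name="assistant", llm_config=llm_config)
print(gpt_assistant.assistant_id)  # record this id to reuse the assistant in later runs
```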