Update minimum openai version to 1.66.5 as import path changed (#5996)

Resolves #5994

OpenAI moved `openai.types.beta.vector_store` to
`openai.types.vector_store`.
https://github.com/openai/openai-python/compare/v1.65.5...v1.66.0

Also fixed unit tests and use parameterized fixture to run all
scenarios.
This commit is contained in:
Eric Zhu 2025-03-18 22:20:04 -07:00 committed by GitHub
parent d83927e22a
commit 69292e6ff4
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 56 additions and 41 deletions

View File

@ -95,7 +95,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@ -243,7 +243,7 @@
" # Upload the file.\n",
" await ctx.cancellation_token.link_future(\n",
" asyncio.ensure_future(\n",
" self._client.beta.vector_stores.file_batches.upload_and_poll(\n",
" self._client.vector_stores.file_batches.upload_and_poll(\n",
" vector_store_id=message.vector_store_id,\n",
" files=[(file_name, file_content)],\n",
" )\n",
@ -349,7 +349,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@ -364,7 +364,7 @@
")\n",
"\n",
"# Create a vector store to be used for file search.\n",
"vector_store = openai.beta.vector_stores.create()\n",
"vector_store = openai.vector_stores.create()\n",
"\n",
"# Create a thread which is used as the memory for the assistant.\n",
"thread = openai.beta.threads.create(\n",
@ -820,7 +820,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "autogen_core",
"display_name": ".venv",
"language": "python",
"name": "python3"
},
@ -834,7 +834,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.9"
"version": "3.12.3"
}
},
"nbformat": 4,

View File

@ -27,7 +27,7 @@ azure = [
]
docker = ["docker~=7.0", "asyncio_atexit>=1.0.1"]
ollama = ["ollama>=0.4.7", "tiktoken>=0.8.0"]
openai = ["openai>=1.52.2", "tiktoken>=0.8.0", "aiofiles"]
openai = ["openai>=1.66.5", "tiktoken>=0.8.0", "aiofiles"]
file-surfer = [
"autogen-agentchat==0.4.9",
"magika>=0.6.1rc2",

View File

@ -52,8 +52,8 @@ from openai.types.beta.file_search_tool_param import FileSearchToolParam
from openai.types.beta.function_tool_param import FunctionToolParam
from openai.types.beta.thread import Thread, ToolResources, ToolResourcesCodeInterpreter
from openai.types.beta.threads import Message, MessageDeleted, Run
from openai.types.beta.vector_store import VectorStore
from openai.types.shared_params.function_definition import FunctionDefinition
from openai.types.vector_store import VectorStore
event_logger = logging.getLogger(EVENT_LOGGER_NAME)
@ -223,7 +223,7 @@ class OpenAIAssistantAgent(BaseChatAgent):
tools (Optional[Iterable[Union[Literal["code_interpreter", "file_search"], Tool | Callable[..., Any] | Callable[..., Awaitable[Any]]]]]): Tools the assistant can use
assistant_id (Optional[str]): ID of existing assistant to use
thread_id (Optional[str]): ID of existing thread to use
metadata (Optional[object]): Additional metadata for the assistant
metadata (Optional[Dict[str, str]]): Additional metadata for the assistant.
response_format (Optional[AssistantResponseFormatOptionParam]): Response format settings
temperature (Optional[float]): Temperature for response generation
tool_resources (Optional[ToolResources]): Additional tool configuration
@ -247,7 +247,7 @@ class OpenAIAssistantAgent(BaseChatAgent):
] = None,
assistant_id: Optional[str] = None,
thread_id: Optional[str] = None,
metadata: Optional[object] = None,
metadata: Optional[Dict[str, str]] = None,
response_format: Optional["AssistantResponseFormatOptionParam"] = None,
temperature: Optional[float] = None,
tool_resources: Optional["ToolResources"] = None,
@ -625,7 +625,7 @@ class OpenAIAssistantAgent(BaseChatAgent):
# Create vector store if not already created
if self._vector_store_id is None:
vector_store: VectorStore = await cancellation_token.link_future(
asyncio.ensure_future(self._client.beta.vector_stores.create())
asyncio.ensure_future(self._client.vector_stores.create())
)
self._vector_store_id = vector_store.id
@ -644,7 +644,7 @@ class OpenAIAssistantAgent(BaseChatAgent):
# Create file batch with the file IDs
await cancellation_token.link_future(
asyncio.ensure_future(
self._client.beta.vector_stores.file_batches.create_and_poll(
self._client.vector_stores.file_batches.create_and_poll(
vector_store_id=self._vector_store_id, file_ids=file_ids
)
)
@ -678,7 +678,7 @@ class OpenAIAssistantAgent(BaseChatAgent):
if self._vector_store_id is not None:
try:
await cancellation_token.link_future(
asyncio.ensure_future(self._client.beta.vector_stores.delete(vector_store_id=self._vector_store_id))
asyncio.ensure_future(self._client.vector_stores.delete(vector_store_id=self._vector_store_id))
)
self._vector_store_id = None
except Exception as e:

View File

@ -13,7 +13,7 @@ from autogen_core import CancellationToken
from autogen_core.tools._base import BaseTool, Tool
from autogen_ext.agents.openai import OpenAIAssistantAgent
from azure.identity import DefaultAzureCredential, get_bearer_token_provider
from openai import AsyncAzureOpenAI
from openai import AsyncAzureOpenAI, AsyncOpenAI
from pydantic import BaseModel
@ -88,9 +88,9 @@ class FakeCursorPage:
return False
def create_mock_openai_client() -> AsyncAzureOpenAI:
def create_mock_openai_client() -> AsyncOpenAI:
# Create the base client as an AsyncMock.
client = AsyncMock(spec=AsyncAzureOpenAI)
client = AsyncMock(spec=AsyncOpenAI)
# Create a "beta" attribute with the required nested structure.
beta = MagicMock()
@ -130,12 +130,12 @@ def create_mock_openai_client() -> AsyncAzureOpenAI:
beta.threads.runs.retrieve = AsyncMock(return_value=MagicMock(id="run-mock", status="completed"))
beta.threads.runs.submit_tool_outputs = AsyncMock(return_value=MagicMock(id="run-mock", status="completed"))
# Setup beta.vector_stores with create, delete, and file_batches.
beta.vector_stores = MagicMock()
beta.vector_stores.create = AsyncMock(return_value=MagicMock(id="vector-mock"))
beta.vector_stores.delete = AsyncMock(return_value=None)
beta.vector_stores.file_batches = MagicMock()
beta.vector_stores.file_batches.create_and_poll = AsyncMock(return_value=None)
# Setup client.vector_stores with create, delete, and file_batches.
client.vector_stores = MagicMock()
client.vector_stores.create = AsyncMock(return_value=MagicMock(id="vector-mock"))
client.vector_stores.delete = AsyncMock(return_value=None)
client.vector_stores.file_batches = MagicMock()
client.vector_stores.file_batches.create_and_poll = AsyncMock(return_value=None)
# Setup client.files with create and delete.
client.files = MagicMock()
@ -147,22 +147,33 @@ def create_mock_openai_client() -> AsyncAzureOpenAI:
# Fixture for the mock client.
@pytest.fixture
def mock_openai_client() -> AsyncAzureOpenAI:
def mock_openai_client() -> AsyncOpenAI:
return create_mock_openai_client()
@pytest.fixture
def client() -> AsyncAzureOpenAI:
@pytest.fixture(params=["openai", "azure", "mock"])
def client(request: pytest.FixtureRequest) -> AsyncOpenAI:
client_type = request.param
if client_type == "mock":
# Return a mock OpenAI client.
return create_mock_openai_client()
if client_type == "openai":
# Check for OpenAI credentials in environment variables.
openai_api_key = os.getenv("OPENAI_API_KEY")
if openai_api_key:
return AsyncOpenAI(api_key=openai_api_key)
else:
pytest.skip("OPENAI_API_KEY not set in environment variables.")
# Check for Azure OpenAI credentials in environment variables.
azure_endpoint = os.getenv("AZURE_OPENAI_ENDPOINT")
api_version = os.getenv("AZURE_OPENAI_API_VERSION", "2024-08-01-preview")
api_key = os.getenv("AZURE_OPENAI_API_KEY")
# Return mock client if credentials not available
if not azure_endpoint or not api_key:
return create_mock_openai_client()
# Try Azure CLI credentials if API key not provided
if not api_key:
if azure_endpoint and not api_key:
# Try Azure CLI credentials if API key not provided
try:
token_provider = get_bearer_token_provider(
DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
@ -171,14 +182,17 @@ def client() -> AsyncAzureOpenAI:
azure_endpoint=azure_endpoint, api_version=api_version, azure_ad_token_provider=token_provider
)
except Exception:
return create_mock_openai_client()
pytest.skip("Failed to obtain Azure CLI credentials.")
# Fall back to API key auth if provided
return AsyncAzureOpenAI(azure_endpoint=azure_endpoint, api_version=api_version, api_key=api_key)
if azure_endpoint and api_key:
# Use Azure OpenAI with API key authentication.
return AsyncAzureOpenAI(azure_endpoint=azure_endpoint, api_version=api_version, api_key=api_key)
pytest.skip("AZURE_OPENAI_ENDPOINT not set in environment variables.")
@pytest.fixture
def agent(client: AsyncAzureOpenAI) -> OpenAIAssistantAgent:
def agent(client: AsyncOpenAI) -> OpenAIAssistantAgent:
tools: List[Union[Literal["code_interpreter", "file_search"], Tool]] = [
"code_interpreter",
"file_search",
@ -266,6 +280,7 @@ async def test_code_interpreter(
@pytest.mark.asyncio
@pytest.mark.parametrize("client", ["mock"], indirect=True)
async def test_quiz_creation(
agent: OpenAIAssistantAgent, cancellation_token: CancellationToken, monkeypatch: pytest.MonkeyPatch
) -> None:
@ -322,7 +337,7 @@ async def test_quiz_creation(
@pytest.mark.asyncio
async def test_on_reset_behavior(client: AsyncAzureOpenAI, cancellation_token: CancellationToken) -> None:
async def test_on_reset_behavior(client: AsyncOpenAI, cancellation_token: CancellationToken) -> None:
# Arrange: Use the default behavior for reset.
thread = await client.beta.threads.create()
await client.beta.threads.messages.create(
@ -356,7 +371,7 @@ async def test_on_reset_behavior(client: AsyncAzureOpenAI, cancellation_token: C
@pytest.mark.asyncio
async def test_save_and_load_state(mock_openai_client: AsyncAzureOpenAI) -> None:
async def test_save_and_load_state(mock_openai_client: AsyncOpenAI) -> None:
agent = OpenAIAssistantAgent(
name="assistant",
description="Dummy assistant for state testing",

8
python/uv.lock generated
View File

@ -751,7 +751,7 @@ requires-dist = [
{ name = "mcp", marker = "extra == 'mcp'", specifier = ">=1.1.3" },
{ name = "nbclient", marker = "extra == 'jupyter-executor'", specifier = ">=0.10.2" },
{ name = "ollama", marker = "extra == 'ollama'", specifier = ">=0.4.7" },
{ name = "openai", marker = "extra == 'openai'", specifier = ">=1.52.2" },
{ name = "openai", marker = "extra == 'openai'", specifier = ">=1.66.5" },
{ name = "openai-whisper", marker = "extra == 'video-surfer'" },
{ name = "opencv-python", marker = "extra == 'video-surfer'", specifier = ">=4.5" },
{ name = "pillow", marker = "extra == 'magentic-one'", specifier = ">=11.0.0" },
@ -4880,7 +4880,7 @@ wheels = [
[[package]]
name = "openai"
version = "1.60.2"
version = "1.66.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@ -4892,9 +4892,9 @@ dependencies = [
{ name = "tqdm" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/08/ae/8d9706b8ff2363287b4a8807de2dd29cdbdad5424e9d05d345df724320f5/openai-1.60.2.tar.gz", hash = "sha256:a8f843e10f2855713007f491d96afb2694b11b5e02cb97c7d01a0be60bc5bb51", size = 348185 }
sdist = { url = "https://files.pythonhosted.org/packages/bb/10/b19dc682c806e6735a8387f2003afe2abada9f9e5227318de642c6949524/openai-1.66.5.tar.gz", hash = "sha256:f61b8fac29490ca8fdc6d996aa6926c18dbe5639536f8c40219c40db05511b11", size = 398595 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e5/5a/d5474ca67a547dde9b87b5bc8a8f90eadf29f523d410f2ba23d63c9b82ec/openai-1.60.2-py3-none-any.whl", hash = "sha256:993bd11b96900b9098179c728026f016b4982ded7ee30dfcf4555eab1171fff9", size = 456107 },
{ url = "https://files.pythonhosted.org/packages/c7/3b/1ba418920ecd1eae7cc4d4ac8a01711ee0879b1a57dd81d10551e5b9a2ea/openai-1.66.5-py3-none-any.whl", hash = "sha256:74be528175f8389f67675830c51a15bd51e874425c86d3de6153bf70ed6c2884", size = 571144 },
]
[[package]]