Mirror of https://github.com/microsoft/autogen.git, synced 2025-07-12 11:30:41 +00:00.

* LMM Code added
* LLaVA notebook update
* Test cases and Notebook modified for OpenAI v1
* Move LMM into contrib. To resolve test issues and deploy issues. In the future, we can install pillow by default, and then move LMM agents back into agentchat
* LMM test setup update
* try...except... clause for LMM tests
* Disable patch for llava agent test, to resolve dependency issues for the build
* Add LMM Blog
* Change docstring for LMM agents
* Docstring update patch
* llava: insert reply at position 1 now, so it can still handle human_input_mode and max_consecutive_auto_reply
* Resolve comments, fixing: typos, blogs, yml, and add OpenAIWrapper
* Signature typo fix for LMM agent: system_message
* Update LMM "content" from latest OpenAI release. Reference: https://platform.openai.com/docs/guides/vision
* Update LMM test according to latest OpenAI release
* Fully support GPT-4V now:
  1. Add a notebook for GPT-4V; the LLaVA notebook is also updated.
  2. img_utils updated.
  3. The GPT-4V formatter now returns a base64 image with MIME type.
  4. Infer MIME type directly from base64 image content (when loading without a suffix).
  5. Test cases modified according to all the related changes.
* GPT-4V link updated in blog
---------
Co-authored-by: Chi Wang <wang.chi@microsoft.com>
63 lines
1.7 KiB
Python
"""Packaging configuration for the ``pyautogen`` distribution."""
import os

import setuptools

# Absolute directory containing this setup.py, so file reads below do not
# depend on the current working directory of the build invocation.
here = os.path.abspath(os.path.dirname(__file__))

# Long description shown on PyPI comes straight from the README.
# NOTE: anchored to `here` (was a cwd-relative "README.md"), matching how
# version.py is located below — otherwise a build started from another
# directory would fail to find the README.
with open(os.path.join(here, "README.md"), "r", encoding="UTF-8") as fh:
    long_description = fh.read()

# Get the code version by executing autogen/version.py in a scratch
# namespace; that file is expected to define __version__.
version = {}
with open(os.path.join(here, "autogen/version.py"), encoding="UTF-8") as fp:
    exec(fp.read(), version)
__version__ = version["__version__"]

# Core runtime dependencies installed with every `pip install pyautogen`.
install_requires = [
    "openai==1.0.0b3",
    "diskcache",
    "termcolor",
    "flaml",
    "python-dotenv",
    "tiktoken",
]

setuptools.setup(
    name="pyautogen",
    version=__version__,
    author="AutoGen",
    author_email="auto-gen@outlook.com",
    description="Enabling Next-Gen LLM Applications via Multi-Agent Conversation Framework",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/microsoft/autogen",
    packages=setuptools.find_packages(include=["autogen*"], exclude=["test"]),
    # package_data={
    #     "autogen.default": ["*/*.json"],
    # },
    # include_package_data=True,
    install_requires=install_requires,
    # Optional feature sets: `pip install pyautogen[<extra>]`.
    extras_require={
        "test": [
            "coverage>=5.3",
            "ipykernel",
            "nbconvert",
            "nbformat",
            "pre-commit",
            "pytest-asyncio",
            "pytest>=6.1.1",
        ],
        "blendsearch": ["flaml[blendsearch]"],
        "mathchat": ["sympy", "pydantic==1.10.9", "wolframalpha"],
        "retrievechat": ["chromadb", "sentence_transformers", "pypdf", "ipython"],
        "teachable": ["chromadb"],
        "lmm": ["replicate", "pillow"],
    },
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires=">=3.8",
)
|