Mirror of https://github.com/microsoft/autogen.git (synced 2025-07-12 03:21:14 +00:00)

* Start Gemini integration: works OK with text now
* Gemini notebook lint
* Try/catch "import" for Gemini
* Debug: id issue for chat completion in Gemini
* Add RAG example
* Update docs for RAG
* Fix missing pydash
* Remove temp folder
* Fix test error in runs/7206014032/job/19630042864
* Fix tqdm warning
* Fix notebook output
* Gemini's vision model is supported now
* Install instructions for the Gemini branch
* Catch and retry when seeing Internal Server Error 500
* Allow gemini to take more flexible messages, i.e., messages where "user" is not the last role
* Use int time for Gemini client
* Handle other exceptions in gemini call
* Rename to "create" function for gemini
* GeminiClient compatible with ModelClient now
* Lint
* Update instructions in Gemini notebook
* Lint
* Remove empty blocks from Gemini notebook
* Add gemini into example page
* self.create instead of call
* Add py and Py into python execution
* Remove error code from merging
* Remove pydash dependency for gemini
* Add cloud-gemini doc
* Remove temp file
* Cache import update
* Add test case for summary with mm input
* Lint: warnings instead of print
* Add test cases for gemini
* Gemini test config
* Disable default model for gemini
* Typo fix in gemini workflow
* Correct grammar in example notebook
* Raise if "model" is not provided in create(...)
* Move TODOs into a roadmap
* Update .github/workflows/contrib-tests.yml (Co-authored-by: Davor Runje <davor@airt.ai>)
* Gemini test config update
* Update setup.py (Co-authored-by: Davor Runje <davor@airt.ai>)
* Update test/oai/test_gemini.py (Co-authored-by: Davor Runje <davor@airt.ai>)
* Update test/oai/test_gemini.py (Co-authored-by: Davor Runje <davor@airt.ai>)
* Remove Python 3.8 from gemini (no google-generativeai for Windows with Python 3.8)
* Update import error handling for gemini
* Count tokens and cost for gemini

---------

Co-authored-by: Li Jiang <bnujli@gmail.com>
Co-authored-by: Davor Runje <davor@airt.ai>
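The log above describes a GeminiClient that is compatible with AutoGen's ModelClient interface and is invoked through create(...). A minimal usage sketch follows; the config keys ("api_type": "google", the "gemini-pro" model name) and the GOOGLE_API_KEY environment variable are assumptions inferred from the commit descriptions, not taken from this file, so check the cloud-gemini doc referenced above for the authoritative setup.

import os

import autogen

# Assumed config entry routing requests to the Gemini client added in this change set.
config_list = [
    {
        "model": "gemini-pro",  # assumed model name
        "api_key": os.environ["GOOGLE_API_KEY"],  # hypothetical environment variable
        "api_type": "google",  # assumed key selecting the Gemini backend
    }
]

assistant = autogen.AssistantAgent("assistant", llm_config={"config_list": config_list})
user = autogen.UserProxyAgent("user", human_input_mode="NEVER", code_execution_config=False)
user.initiate_chat(assistant, message="Summarize the benefits of multi-agent workflows.")

Installing the gemini extra declared in the setup.py below (pip install "pyautogen[gemini]") pulls in google-generativeai>=0.5,<1, pillow, and pydantic on top of the base requirements.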
82 lines · 2.7 KiB · Python
import os

import setuptools

here = os.path.abspath(os.path.dirname(__file__))

with open("README.md", "r", encoding="UTF-8") as fh:
    long_description = fh.read()

# Get the code version
version = {}
with open(os.path.join(here, "autogen/version.py")) as fp:
    exec(fp.read(), version)
__version__ = version["__version__"]

install_requires = [
    "openai>=1.3",
    "diskcache",
    "termcolor",
    "flaml",
    # numpy is installed by flaml, but we want to pin the version to below 2.x (see https://github.com/microsoft/autogen/issues/1960)
    "numpy>=1.17.0,<2",
    "python-dotenv",
    "tiktoken",
    # Disallowing 2.6.0 can be removed when this is fixed https://github.com/pydantic/pydantic/issues/8705
    "pydantic>=1.10,<3,!=2.6.0",  # could be both V1 and V2
    "docker",
]

jupyter_executor = [
    "jupyter-kernel-gateway",
    "websocket-client",
    "requests",
    "jupyter-client>=8.6.0",
    "ipykernel>=6.29.0",
]

setuptools.setup(
    name="pyautogen",
    version=__version__,
    author="AutoGen",
    author_email="auto-gen@outlook.com",
    description="Enabling Next-Gen LLM Applications via Multi-Agent Conversation Framework",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/microsoft/autogen",
    packages=setuptools.find_packages(include=["autogen*"], exclude=["test"]),
    install_requires=install_requires,
    extras_require={
        "test": [
            "coverage>=5.3",
            "ipykernel",
            "nbconvert",
            "nbformat",
            "pre-commit",
            "pytest-asyncio",
            "pytest>=6.1.1,<8",
            "pandas",
        ],
        "blendsearch": ["flaml[blendsearch]"],
        "mathchat": ["sympy", "pydantic==1.10.9", "wolframalpha"],
        "retrievechat": ["chromadb", "sentence_transformers", "pypdf", "ipython", "beautifulsoup4", "markdownify"],
        "autobuild": ["chromadb", "sentence-transformers", "huggingface-hub"],
        "teachable": ["chromadb"],
        "lmm": ["replicate", "pillow"],
        "graph": ["networkx", "matplotlib"],
        "gemini": ["google-generativeai>=0.5,<1", "pillow", "pydantic"],
        "websurfer": ["beautifulsoup4", "markdownify", "pdfminer.six", "pathvalidate"],
        "redis": ["redis"],
        "cosmosdb": ["azure-cosmos>=4.2.0"],
        "websockets": ["websockets>=12.0,<13"],
        "jupyter-executor": jupyter_executor,
        "types": ["mypy==1.9.0", "pytest>=6.1.1,<8"] + jupyter_executor,
    },
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires=">=3.8,<3.13",
)
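Each key of extras_require is a pip extra, so optional backends stay out of the base install. The commit log above mentions a guarded import ("Try/catch 'import' for Gemini"); a small sketch of that pattern, assuming only that the google-generativeai distribution is imported as google.generativeai:

# Minimal sketch: degrade gracefully when the optional "gemini" extra is not installed.
try:
    import google.generativeai as genai  # provided by: pip install "pyautogen[gemini]"
except ImportError:
    genai = None

if genai is None:
    print('Gemini support is not installed; run: pip install "pyautogen[gemini]"')
else:
    print("Gemini dependencies are available.")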