autogen/setup.py
Chi Wang c4f8b1c761
Dev/v0.2 (#393)
* api_base -> base_url (#383)

* InvalidRequestError -> BadRequestError (#389)

* remove api_key_path; close #388

* close #402 (#403)

* openai client (#419)

* openai client

* client test

* _client -> client

* _client -> client

* extra kwargs

* Completion -> client (#426)

* Completion -> client

* Completion -> client

* Completion -> client

* Completion -> client

* support aoai

* fix test error

* remove commented code

* support aoai

* annotations

* import

* reduce test

* skip test

* skip test

* skip test

* debug test

* rename test

* update workflow

* update workflow

* env

* py version

* doc improvement

* docstr update

* openai<1

* add tiktoken to dependency

* filter_func

* async test

* dependency

* migration guide (#477)

* migration guide

* change in kwargs

* simplify header

* update optiguide description

* deal with azure gpt-3.5

* add back test_eval_math_responses

* timeout

* Add back tests for RetrieveChat (#480)

* Add back tests for RetrieveChat

* Fix format

* Update dependencies order

* Fix path

* Fix path

* Fix path

* Fix tests

* Do not run openai tests on MacOS or Win

* Update skip openai tests

* Remove unnecessary dependencies, improve format

* Add py3.8 for testing qdrant

* Fix multiline error of windows

* Add openai tests

* Add dependency mathchat, remove unused envs

* retrieve chat is tested

* bump version to 0.2.0b1

---------

Co-authored-by: Li Jiang <bnujli@gmail.com>
2023-11-04 04:01:49 +00:00
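
The bullets above track the move from the module-level openai<1 API to the openai>=1 client object (api_base -> base_url, Completion -> client, InvalidRequestError -> BadRequestError). A minimal sketch of that underlying rename in openai-python, not of the AutoGen wrapper itself; the endpoint and key below are placeholders:

# Before (openai<1): module-level settings and calls, e.g.
#   openai.api_base = "https://<endpoint>"; openai.Completion.create(...)
# After (openai>=1): an explicit client object configured with base_url.
from openai import OpenAI

client = OpenAI(base_url="https://api.openai.com/v1", api_key="sk-placeholder")  # placeholder values
response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello"}],
)
print(response.choices[0].message.content)
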

63 lines
1.7 KiB
Python

import setuptools
import os

here = os.path.abspath(os.path.dirname(__file__))

with open("README.md", "r", encoding="UTF-8") as fh:
    long_description = fh.read()

# Get the code version
version = {}
with open(os.path.join(here, "autogen/version.py")) as fp:
    exec(fp.read(), version)
__version__ = version["__version__"]

install_requires = [
    "openai==1.0.0b3",
    "diskcache",
    "termcolor",
    "flaml",
    "python-dotenv",
    "tiktoken",
]

setuptools.setup(
    name="pyautogen",
    version=__version__,
    author="AutoGen",
    author_email="auto-gen@outlook.com",
    description="Enabling Next-Gen LLM Applications via Multi-Agent Conversation Framework",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/microsoft/autogen",
    packages=setuptools.find_packages(include=["autogen*"], exclude=["test"]),
    # package_data={
    #     "autogen.default": ["*/*.json"],
    # },
    # include_package_data=True,
    install_requires=install_requires,
    extras_require={
        "test": [
            "coverage>=5.3",
            "ipykernel",
            "nbconvert",
            "nbformat",
            "pre-commit",
            "pytest-asyncio",
            "pytest>=6.1.1",
        ],
        "blendsearch": ["flaml[blendsearch]"],
        "mathchat": ["sympy", "pydantic==1.10.9", "wolframalpha"],
        "retrievechat": ["chromadb", "sentence_transformers", "pypdf", "ipython"],
        "teachable": ["chromadb"],
    },
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires=">=3.8",
)