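# setup.py for farm-haystack: reads requirements.txt to build the
# install_requires and dependency_links passed to setuptools.setup().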
from io import open

from setuptools import find_packages, setup


def parse_requirements(filename):
    """
    Parse a pip requirements file and return the list of required packages.
    Blank lines, commented lines, --find-links directives, and git+https
    entries are excluded.

    Args:
        filename: path to the pip requirements file

    Returns:
        list of required packages with version constraints
    """
    with open(filename) as file:
        parsed_requirements = file.read().splitlines()
    # Keep only real requirement lines.
    parsed_requirements = [
        line.strip()
        for line in parsed_requirements
        if line.strip()
        and not line.strip().startswith('#')
        and not line.strip().startswith('--find-links')
        and 'git+https' not in line
    ]
    return parsed_requirements
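# Illustrative only: for a hypothetical requirements.txt containing
#     # core dependencies
#     --find-links=https://download.pytorch.org/whl/torch_stable.html
#     torch>=1.5.0
# parse_requirements() would return ['torch>=1.5.0'].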


def get_dependency_links(filename):
    """
    Parse a pip requirements file looking for the --find-links directive.

    Args:
        filename: path to the pip requirements file

    Returns:
        list of URLs given by --find-links directives
    """
    with open(filename) as file:
        parsed_requirements = file.read().splitlines()
    dependency_links = list()
    for line in parsed_requirements:
        line = line.strip()
        if line.startswith('--find-links'):
            # Expect the form "--find-links=<url>"; keep everything after the first "=".
            dependency_links.append(line.split('=', 1)[1])
    return dependency_links
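# Illustrative only: for the hypothetical requirements.txt sketched above,
# get_dependency_links() would return
# ['https://download.pytorch.org/whl/torch_stable.html'].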


dependency_links = get_dependency_links('requirements.txt')
parsed_requirements = parse_requirements('requirements.txt')

setup(
    name="farm-haystack",
    version="0.4.0",
    author="Malte Pietsch, Timo Moeller, Branden Chan, Tanay Soni",
    author_email="malte.pietsch@deepset.ai",
    description="Neural Question Answering at Scale. Use modern transformer based models like BERT to find answers in large document collections",
    long_description=open("README.md", "r", encoding="utf-8").read(),
    long_description_content_type="text/markdown",
    keywords="QA Question-Answering Reader Retriever BERT roberta albert squad mrc transfer-learning language-model transformer",
    license="Apache",
    url="https://github.com/deepset-ai/haystack",
    download_url="https://github.com/deepset-ai/haystack/archive/0.4.0.tar.gz",
    packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    dependency_links=dependency_links,
    install_requires=parsed_requirements,
    python_requires=">=3.6.0",
    tests_require=["pytest"],
    classifiers=[
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python :: 3",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
)
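# Typical usage (general setuptools convention, not prescribed by this repository):
#   pip install .      # install the package from this directory
#   pip install -e .   # editable/development install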