* Files moved, imports all broken
* Fix most imports and docstrings into
* Fix the paths to the modules in the API docs
* Add latest docstring and tutorial changes
* Add a few pipelines that were lost in the imports
* Fix a bunch of mypy warnings
* Add latest docstring and tutorial changes
* Create a file_classifier module
* Add docs for file_classifier
* Fixed most circular imports, now the REST API can start
* Add latest docstring and tutorial changes
* Tackling more mypy issues
* Reintroduce from FARM and fix last mypy issues hopefully
* Re-enable old-style imports
* Fix some more imports from the top-level package in an attempt to sort out circular imports
* Fix some imports in tests to new-style to prevent failed class equalities from breaking tests
* Change document_store into document_stores
* Update imports in tutorials
* Add latest docstring and tutorial changes
* Probably fixes summarizer tests
* Improve the old-style import allowing module imports (should work)
* Try to fix the docs
* Remove dedicated KnowledgeGraph page from autodocs
* Remove dedicated GraphRetriever page from autodocs
* Fix generate_docstrings.sh with an updated list of yaml files to look for
* Fix some more modules in the docs
* Fix the document stores docs too
* Fix a small issue on Tutorial14
* Add latest docstring and tutorial changes
* Add deprecation warning to old-style imports
* Remove stray folder and import Dict into dense.py
* Change import path for MLFlowLogger
* Add old loggers path to the import path aliases
* Fix debug output of convert_ipynb.py
* Fix circular import on BaseRetriever
* Missed one merge block
* re-run tutorial 5
* Fix imports in tutorial 5
* Re-enable squad_to_dpr CLI from the root package and move get_batches_from_generator into document_stores.base
* Add latest docstring and tutorial changes
* Fix typo in utils __init__
* Fix a few more imports
* Fix benchmarks too
* New-style imports in test_knowledge_graph
* Rollback setup.py
* Rollback squad_to_dpr too

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
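One item above, "Add deprecation warning to old-style imports", is worth illustrating. A minimal sketch of how a package can keep old-style imports working while warning about them, assuming a PEP 562 module-level __getattr__ in the package __init__ and a hypothetical alias map (illustrative only, not the actual haystack code):

```python
import importlib
import warnings

# Hypothetical mapping from old top-level names to their new module paths.
_DEPRECATED_ALIASES = {"document_store": "haystack.document_stores"}

def __getattr__(name):
    # PEP 562 (Python 3.7+): called when a module attribute is not found,
    # so "from haystack import document_store" still resolves, with a warning.
    if name in _DEPRECATED_ALIASES:
        new_path = _DEPRECATED_ALIASES[name]
        warnings.warn(
            f"'haystack.{name}' is deprecated, import '{new_path}' instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return importlib.import_module(new_path)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
```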
import os
import re

from setuptools import find_packages, setup


def parse_requirements(filename):
    """
    Parse a pip requirements file and return the list of required packages.
    Commented lines, --find-links directives and git+https entries are excluded.

    Args:
        filename: path to a pip requirements file

    Returns:
        list of required packages with version constraints
    """
    with open(filename) as file:
        lines = file.read().splitlines()
    parsed_requirements = [
        line.strip()
        for line in lines
        if line.strip()  # skip blank lines, which would break the checks below
        and not (
            line.strip().startswith("#")
            or line.strip().startswith("--find-links")
            or "git+https" in line
        )
    ]
    return parsed_requirements
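
# Illustrative example (not in the original file): for a hypothetical
# requirements.txt containing
#
#     # comment line
#     torch>=1.9
#     --find-links=https://example.com/wheels
#     git+https://github.com/example/pkg.git
#
# parse_requirements("requirements.txt") would return ["torch>=1.9"]:
# the comment, --find-links and git+https lines are all filtered out.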


def get_dependency_links(filename):
    """
    Parse a pip requirements file looking for --find-links directives.

    Args:
        filename: path to a pip requirements file

    Returns:
        list of URLs given by --find-links directives
    """
    with open(filename) as file:
        lines = file.read().splitlines()
    dependency_links = []
    for line in lines:
        line = line.strip()
        if line.startswith('--find-links'):
            # expects the form "--find-links=<url>"
            dependency_links.append(line.split('=')[1])
    return dependency_links
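
# Illustrative example (not in the original file): for the same hypothetical
# requirements.txt as above, get_dependency_links("requirements.txt") would
# return ["https://example.com/wheels"], i.e. the URL after the "=" in each
# "--find-links=<url>" line.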


dependency_links = get_dependency_links('requirements.txt')
parsed_requirements = parse_requirements('requirements.txt')


def versionfromfile(*filepath):
    """Extract the __version__ string from the file at the given path."""
    infile = os.path.join(*filepath)
    with open(infile) as fp:
        version_match = re.search(
            r"^__version__\s*=\s*['\"]([^'\"]*)['\"]", fp.read(), re.M
        )
        if version_match:
            return version_match.group(1)
        raise RuntimeError("Unable to find version string in {}.".format(infile))
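
# Illustrative example (not in the original file): versionfromfile expects the
# target file to contain a line such as
#
#     __version__ = "0.10.0"
#
# in which case it returns "0.10.0"; otherwise it raises a RuntimeError.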


here = os.path.abspath(os.path.dirname(__file__))
_version: str = versionfromfile(here, "haystack", "_version.py")

setup(
    name="farm-haystack",
    version=_version,
    author="Malte Pietsch, Timo Moeller, Branden Chan, Tanay Soni",
    author_email="malte.pietsch@deepset.ai",
    description="Neural Question Answering & Semantic Search at Scale. Use modern transformer based models like BERT to find answers in large document collections",
    long_description=open("README.md", "r", encoding="utf-8").read(),
    long_description_content_type="text/markdown",
    keywords="QA Question-Answering Reader Retriever semantic-search search BERT roberta albert squad mrc transfer-learning language-model transformer",
    license="Apache",
    url="https://github.com/deepset-ai/haystack",
    download_url=f"https://github.com/deepset-ai/haystack/archive/{_version}.tar.gz",
    packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    dependency_links=dependency_links,
    install_requires=parsed_requirements,
    python_requires=">=3.7.0",
    tests_require=["pytest"],
    classifiers=[
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python :: 3",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
)
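
# Usage note (illustrative, not part of the original file): from the repository
# root, "pip install ." runs this setup and installs farm-haystack with the
# dependencies parsed above; "pip install -e ." installs it in editable mode
# for development.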