feat: move cli out from preview (#5055)

* move cli from preview

* readme

* review feedback

* test mocks & import paths

* import path
This commit is contained in:
ZanSara 2023-05-31 18:34:14 +02:00 committed by GitHub
parent e209abd48e
commit 89de76d5fe
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 62 additions and 18 deletions

44
haystack/cli/README.md Normal file
View File

@ -0,0 +1,44 @@
# Haystack CLI
<p align="center" float="left">
<img alt="" src="https://raw.githubusercontent.com/deepset-ai/.github/main/deepset-logo-colored.png" width="30%"/>
<img alt="" src="https://raw.githubusercontent.com/deepset-ai/.github/main/haystack-logo-colored-on-dark.png#gh-dark-mode-only" width="30%"/>
<img alt="" src="https://raw.githubusercontent.com/deepset-ai/.github/main/haystack-logo-colored.png#gh-light-mode-only" width="30%"/>
</p>
<strong><a href="https://github.com/deepset-ai/haystack">Haystack</a></strong> is an open source NLP framework by <strong><a href="https://deepset.ai">deepset</a></strong> to help you build production-ready search systems or applications powered by various NLP tasks such as Question Answering. Haystack is designed to help you build systems that work intelligently over large document collections. It achieves this with the concept of <strong>Pipelines</strong> consisting of various <strong>Nodes</strong> such as a <strong>DocumentStore</strong>, a <strong>Retriever</strong> and a <strong>Reader</strong>.
This is the repository where we keep the code for the Haystack CLI.
To contribute to the CLI, please check out our [Contributing Guidelines](./Contributing.md)
## Available commands
### `haystack prompt fetch`
```
Usage: haystack prompt fetch [OPTIONS] [PROMPT_NAME]...
Downloads a prompt from the official Haystack PromptHub and saves it locally
to ease use in environments with no network.
PROMPT_NAME can be specified multiple times.
PROMPTHUB_CACHE_PATH environment variable can be set to change the default
folder in which the prompts will be saved in.
If a custom PROMPTHUB_CACHE_PATH is used, remember to also use it for
Haystack invocations.
The Haystack PromptHub is https://prompthub.deepset.ai/
Options:
--help Show this message and exit.
```
Example usage:
```
haystack prompt fetch deepset/conversational-agent-with-tools deepset/summarization
```

View File

@ -1,7 +1,7 @@
import click
from haystack import __version__
from haystack.preview.cli.prompt import prompt
from haystack.cli.prompt import prompt
@click.group()

View File

@ -1,6 +1,6 @@
import click
from haystack.preview.cli.prompt import fetch
from haystack.cli.prompt import fetch
@click.group(short_help="Prompts related commands")

View File

@ -6,16 +6,16 @@ from haystack.nodes.prompt.prompt_template import PromptNotFoundError, fetch_fro
@click.command(
short_help="Downloads and saves prompts from Haystack PromptHub",
help="""
Downloads a prompt from the official Haystack PromptHub and saves
it locally to ease use in enviroments with no network.
Downloads a prompt from the official Haystack PromptHub and saves it locally
to ease use in environments with no network.
PROMPT_NAME can be specified multiple times.
PROMPTHUB_CACHE_PATH environment variable can be set to change the
default folder in which the prompts will be saved in.
PROMPTHUB_CACHE_PATH environment variable can be set to change the default
folder in which the prompts will be saved in.
If a custom PROMPTHUB_CACHE_PATH is used remember to also used it
for Haystack invocations.
If a custom PROMPTHUB_CACHE_PATH is used, remember to also use it for
Haystack invocations.
The Haystack PromptHub is https://prompthub.deepset.ai/
""",

View File

@ -220,7 +220,7 @@ all-gpu = [
]
[project.scripts]
haystack = "haystack.preview.cli.entry_point:main"
haystack = "haystack.cli.entry_point:main"
[project.urls]
"CI: GitHub" = "https://github.com/deepset-ai/haystack/actions"

View File

@ -2,13 +2,13 @@ from unittest.mock import patch
import pytest
from haystack.preview.cli.entry_point import main_cli
from haystack.cli.entry_point import main_cli
from haystack.nodes.prompt.prompt_template import PromptNotFoundError
@pytest.mark.unit
@patch("haystack.preview.cli.prompt.fetch.fetch_from_prompthub")
@patch("haystack.preview.cli.prompt.fetch.cache_prompt")
@patch("haystack.cli.prompt.fetch.fetch_from_prompthub")
@patch("haystack.cli.prompt.fetch.cache_prompt")
def test_prompt_fetch_no_args(mock_cache, mock_fetch, cli_runner):
response = cli_runner.invoke(main_cli, ["prompt", "fetch"])
assert response.exit_code == 0
@ -18,8 +18,8 @@ def test_prompt_fetch_no_args(mock_cache, mock_fetch, cli_runner):
@pytest.mark.unit
@patch("haystack.preview.cli.prompt.fetch.fetch_from_prompthub")
@patch("haystack.preview.cli.prompt.fetch.cache_prompt")
@patch("haystack.cli.prompt.fetch.fetch_from_prompthub")
@patch("haystack.cli.prompt.fetch.cache_prompt")
def test_prompt_fetch(mock_cache, mock_fetch, cli_runner):
response = cli_runner.invoke(main_cli, ["prompt", "fetch", "deepset/question-generation"])
assert response.exit_code == 0
@ -29,8 +29,8 @@ def test_prompt_fetch(mock_cache, mock_fetch, cli_runner):
@pytest.mark.unit
@patch("haystack.preview.cli.prompt.fetch.fetch_from_prompthub")
@patch("haystack.preview.cli.prompt.fetch.cache_prompt")
@patch("haystack.cli.prompt.fetch.fetch_from_prompthub")
@patch("haystack.cli.prompt.fetch.cache_prompt")
def test_prompt_fetch_with_multiple_prompts(mock_cache, mock_fetch, cli_runner):
response = cli_runner.invoke(
main_cli, ["prompt", "fetch", "deepset/question-generation", "deepset/conversational-agent"]
@ -45,8 +45,8 @@ def test_prompt_fetch_with_multiple_prompts(mock_cache, mock_fetch, cli_runner):
@pytest.mark.unit
@patch("haystack.preview.cli.prompt.fetch.fetch_from_prompthub")
@patch("haystack.preview.cli.prompt.fetch.cache_prompt")
@patch("haystack.cli.prompt.fetch.fetch_from_prompthub")
@patch("haystack.cli.prompt.fetch.cache_prompt")
def test_prompt_fetch_with_unexisting_prompt(mock_cache, mock_fetch, cli_runner):
prompt_name = "deepset/martian-speak"
error_message = f"Prompt template named '{prompt_name}' not available in the Prompt Hub."