diff --git a/haystack/cli/README.md b/haystack/cli/README.md
new file mode 100644
index 000000000..831e45b9c
--- /dev/null
+++ b/haystack/cli/README.md
@@ -0,0 +1,44 @@
+# Haystack CLI
+
+
+
+
+
+
+
+Haystack is an open-source NLP framework by deepset that helps you build production-ready search systems or applications powered by various NLP tasks, such as Question Answering. Haystack is designed to help you build systems that work intelligently over large document collections. It achieves this with the concept of Pipelines consisting of various Nodes, such as a DocumentStore, a Retriever, and a Reader.
+
+
+This is the directory where we keep the code for the Haystack CLI.
+
+To contribute to the Haystack CLI please check out our [Contributing Guidelines](./Contributing.md)
+
+## Available commands
+
+### `haystack prompt fetch`
+
+```
+Usage: haystack prompt fetch [OPTIONS] [PROMPT_NAME]...
+
+ Downloads a prompt from the official Haystack PromptHub and saves it locally
+ to ease use in environments with no network.
+
+ PROMPT_NAME can be specified multiple times.
+
+ PROMPTHUB_CACHE_PATH environment variable can be set to change the default
+ folder in which the prompts will be saved in.
+
+ If a custom PROMPTHUB_CACHE_PATH is used, remember to also use it for
+ Haystack invocations.
+
+ The Haystack PromptHub is https://prompthub.deepset.ai/
+
+Options:
+ --help Show this message and exit.
+```
+
+Example usage:
+
+```
+haystack prompt fetch deepset/conversational-agent-with-tools deepset/summarization
+```
diff --git a/haystack/preview/cli/__init__.py b/haystack/cli/__init__.py
similarity index 100%
rename from haystack/preview/cli/__init__.py
rename to haystack/cli/__init__.py
diff --git a/haystack/preview/cli/entry_point.py b/haystack/cli/entry_point.py
similarity index 79%
rename from haystack/preview/cli/entry_point.py
rename to haystack/cli/entry_point.py
index 7c12c2445..08e311bf8 100644
--- a/haystack/preview/cli/entry_point.py
+++ b/haystack/cli/entry_point.py
@@ -1,7 +1,7 @@
import click
from haystack import __version__
-from haystack.preview.cli.prompt import prompt
+from haystack.cli.prompt import prompt
@click.group()
diff --git a/haystack/preview/cli/prompt/__init__.py b/haystack/cli/prompt/__init__.py
similarity index 73%
rename from haystack/preview/cli/prompt/__init__.py
rename to haystack/cli/prompt/__init__.py
index f282f3c4c..4baf81a06 100644
--- a/haystack/preview/cli/prompt/__init__.py
+++ b/haystack/cli/prompt/__init__.py
@@ -1,6 +1,6 @@
import click
-from haystack.preview.cli.prompt import fetch
+from haystack.cli.prompt import fetch
@click.group(short_help="Prompts related commands")
diff --git a/haystack/preview/cli/prompt/fetch.py b/haystack/cli/prompt/fetch.py
similarity index 76%
rename from haystack/preview/cli/prompt/fetch.py
rename to haystack/cli/prompt/fetch.py
index dba9f8ebc..abd04c25d 100644
--- a/haystack/preview/cli/prompt/fetch.py
+++ b/haystack/cli/prompt/fetch.py
@@ -6,16 +6,16 @@ from haystack.nodes.prompt.prompt_template import PromptNotFoundError, fetch_fro
@click.command(
short_help="Downloads and saves prompts from Haystack PromptHub",
help="""
- Downloads a prompt from the official Haystack PromptHub and saves
- it locally to ease use in enviroments with no network.
+ Downloads a prompt from the official Haystack PromptHub and saves it locally
+ to ease use in environments with no network.
PROMPT_NAME can be specified multiple times.
- PROMPTHUB_CACHE_PATH environment variable can be set to change the
- default folder in which the prompts will be saved in.
+ PROMPTHUB_CACHE_PATH environment variable can be set to change the default
+ folder in which the prompts will be saved in.
- If a custom PROMPTHUB_CACHE_PATH is used remember to also used it
- for Haystack invocations.
+ If a custom PROMPTHUB_CACHE_PATH is used, remember to also use it for
+ Haystack invocations.
The Haystack PromptHub is https://prompthub.deepset.ai/
""",
diff --git a/pyproject.toml b/pyproject.toml
index 35f8f4f83..3e98e4bc6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -220,7 +220,7 @@ all-gpu = [
]
[project.scripts]
-haystack = "haystack.preview.cli.entry_point:main"
+haystack = "haystack.cli.entry_point:main"
[project.urls]
"CI: GitHub" = "https://github.com/deepset-ai/haystack/actions"
diff --git a/test/preview/cli/conftest.py b/test/cli/conftest.py
similarity index 100%
rename from test/preview/cli/conftest.py
rename to test/cli/conftest.py
diff --git a/test/preview/cli/test_prompt_fetch.py b/test/cli/test_prompt_fetch.py
similarity index 76%
rename from test/preview/cli/test_prompt_fetch.py
rename to test/cli/test_prompt_fetch.py
index 2eba20d64..56851c84f 100644
--- a/test/preview/cli/test_prompt_fetch.py
+++ b/test/cli/test_prompt_fetch.py
@@ -2,13 +2,13 @@ from unittest.mock import patch
import pytest
-from haystack.preview.cli.entry_point import main_cli
+from haystack.cli.entry_point import main_cli
from haystack.nodes.prompt.prompt_template import PromptNotFoundError
@pytest.mark.unit
-@patch("haystack.preview.cli.prompt.fetch.fetch_from_prompthub")
-@patch("haystack.preview.cli.prompt.fetch.cache_prompt")
+@patch("haystack.cli.prompt.fetch.fetch_from_prompthub")
+@patch("haystack.cli.prompt.fetch.cache_prompt")
def test_prompt_fetch_no_args(mock_cache, mock_fetch, cli_runner):
response = cli_runner.invoke(main_cli, ["prompt", "fetch"])
assert response.exit_code == 0
@@ -18,8 +18,8 @@ def test_prompt_fetch_no_args(mock_cache, mock_fetch, cli_runner):
@pytest.mark.unit
-@patch("haystack.preview.cli.prompt.fetch.fetch_from_prompthub")
-@patch("haystack.preview.cli.prompt.fetch.cache_prompt")
+@patch("haystack.cli.prompt.fetch.fetch_from_prompthub")
+@patch("haystack.cli.prompt.fetch.cache_prompt")
def test_prompt_fetch(mock_cache, mock_fetch, cli_runner):
response = cli_runner.invoke(main_cli, ["prompt", "fetch", "deepset/question-generation"])
assert response.exit_code == 0
@@ -29,8 +29,8 @@ def test_prompt_fetch(mock_cache, mock_fetch, cli_runner):
@pytest.mark.unit
-@patch("haystack.preview.cli.prompt.fetch.fetch_from_prompthub")
-@patch("haystack.preview.cli.prompt.fetch.cache_prompt")
+@patch("haystack.cli.prompt.fetch.fetch_from_prompthub")
+@patch("haystack.cli.prompt.fetch.cache_prompt")
def test_prompt_fetch_with_multiple_prompts(mock_cache, mock_fetch, cli_runner):
response = cli_runner.invoke(
main_cli, ["prompt", "fetch", "deepset/question-generation", "deepset/conversational-agent"]
@@ -45,8 +45,8 @@ def test_prompt_fetch_with_multiple_prompts(mock_cache, mock_fetch, cli_runner):
@pytest.mark.unit
-@patch("haystack.preview.cli.prompt.fetch.fetch_from_prompthub")
-@patch("haystack.preview.cli.prompt.fetch.cache_prompt")
+@patch("haystack.cli.prompt.fetch.fetch_from_prompthub")
+@patch("haystack.cli.prompt.fetch.cache_prompt")
def test_prompt_fetch_with_unexisting_prompt(mock_cache, mock_fetch, cli_runner):
prompt_name = "deepset/martian-speak"
error_message = f"Prompt template named '{prompt_name}' not available in the Prompt Hub."