delete_all_documents() replaced by delete_documents() (#1377)

* [UPDT] delete_all_documents() replaced by delete_documents()

* [UPDT] warning logs to be fixed

* [UPDT] delete_all_documents() renamed and the same method added

Co-authored-by: Ram Garg <ramgarg102@gmai.com>
This commit is contained in:
ramgarg102 2021-08-30 18:48:28 +05:30 committed by GitHub
parent be8d305190
commit 51f0a56e5d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 31 additions and 13 deletions

View File

@@ -678,6 +678,21 @@ class WeaviateDocumentStore(BaseDocumentStore):
def delete_all_documents(self, index: Optional[str] = None, filters: Optional[Dict[str, List[str]]] = None):
    """
    Delete documents in an index. All documents are deleted if no filters are passed.

    .. deprecated::
       Use :meth:`delete_documents` instead. For details see
       https://github.com/deepset-ai/haystack/issues/1045

    :param index: Index name to delete the document from.
    :param filters: Optional filters to narrow down the documents to be deleted.
    :return: None
    """
    # Deprecated alias kept for backward compatibility: emit a single clear
    # warning (instead of a multi-line numbered banner) and delegate.
    logger.warning(
        "delete_all_documents() method is deprecated, please use delete_documents method. "
        "For more details, please refer to the issue: https://github.com/deepset-ai/haystack/issues/1045"
    )
    self.delete_documents(index, filters)
def delete_documents(self, index: Optional[str] = None, filters: Optional[Dict[str, List[str]]] = None):
"""
Delete documents in an index. All documents are deleted if no filters are passed.
:param index: Index name to delete the document from.
:param filters: Optional filters to narrow down the documents to be deleted.
@@ -691,3 +706,6 @@ class WeaviateDocumentStore(BaseDocumentStore):
else:
self.weaviate_client.schema.delete_class(index)
self._create_schema_and_index_if_not_exist(index)

View File

@@ -406,7 +406,7 @@ def document_store_with_docs(request, test_docs_xs):
document_store = get_document_store(request.param)
document_store.write_documents(test_docs_xs)
yield document_store
document_store.delete_all_documents()
document_store.delete_documents()
@pytest.fixture
@@ -414,7 +414,7 @@ def document_store(request, test_docs_xs):
vector_dim = request.node.get_closest_marker("vector_dim", pytest.mark.vector_dim(768))
document_store = get_document_store(request.param, vector_dim.args[0])
yield document_store
document_store.delete_all_documents()
document_store.delete_documents()
def get_document_store(document_store_type, embedding_dim=768, embedding_field="embedding"):

View File

@@ -324,7 +324,7 @@ def test_update_embeddings(document_store, retriever):
def test_delete_all_documents(document_store_with_docs):
assert len(document_store_with_docs.get_all_documents()) == 3
document_store_with_docs.delete_all_documents()
document_store_with_docs.delete_documents()
documents = document_store_with_docs.get_all_documents()
assert len(documents) == 0

View File

@@ -193,7 +193,7 @@ def test_faiss_passing_index_from_outside(tmp_path):
sql_url=f"sqlite:////{tmp_path/'haystack_test_faiss.db'}", faiss_index=faiss_index, index=index
)
document_store.delete_all_documents()
document_store.delete_documents()
# as it is a IVF index we need to train it before adding docs
document_store.train_index(DOCUMENTS)

View File

@@ -31,13 +31,13 @@ def document_store_with_docs(request):
document_store = get_document_store(request.param)
document_store.write_documents(DOCUMENTS_XS)
yield document_store
document_store.delete_all_documents()
document_store.delete_documents()
@pytest.fixture(params=["weaviate"])
def document_store(request):
document_store = get_document_store(request.param)
yield document_store
document_store.delete_all_documents()
document_store.delete_documents()
@pytest.mark.weaviate
@pytest.mark.parametrize("document_store_with_docs", ["weaviate"], indirect=True)
@@ -316,7 +316,7 @@ def test_query(document_store_with_docs):
def test_delete_all_documents(document_store_with_docs):
assert len(document_store_with_docs.get_all_documents()) == 3
document_store_with_docs.delete_all_documents()
document_store_with_docs.delete_documents()
documents = document_store_with_docs.get_all_documents()
assert len(documents) == 0

View File

@@ -217,7 +217,7 @@
"# Initialize DocumentStore and index documents\n",
"launch_es()\n",
"document_store = ElasticsearchDocumentStore()\n",
"document_store.delete_all_documents()\n",
"document_store.delete_documents()\n",
"document_store.write_documents(got_dicts)\n",
"\n",
"# Initialize Sparse retriever\n",

View File

@@ -29,7 +29,7 @@ def tutorial11_pipelines():
# Initialize DocumentStore and index documents
launch_es()
document_store = ElasticsearchDocumentStore()
document_store.delete_all_documents()
document_store.delete_documents()
document_store.write_documents(got_dicts)
# Initialize Sparse retriever

View File

@@ -6838,7 +6838,7 @@
"# Initialize DocumentStore and index documents\n",
"launch_es()\n",
"document_store = ElasticsearchDocumentStore()\n",
"document_store.delete_all_documents()\n",
"document_store.delete_documents()\n",
"document_store.write_documents(got_dicts)\n",
"\n",
"# Initialize Sparse retriever\n",

View File

@@ -29,7 +29,7 @@ def tutorial14_query_classifier():
# Initialize DocumentStore and index documents
launch_es()
document_store = ElasticsearchDocumentStore()
document_store.delete_all_documents()
document_store.delete_documents()
document_store.write_documents(got_dicts)
# Initialize Sparse retriever

View File

@@ -224,7 +224,7 @@
"outputs": [],
"source": [
"# Delete existing documents in documents store\n",
"document_store.delete_all_documents()\n",
"document_store.delete_documents()\n",
"\n",
"# Write documents to document store\n",
"document_store.write_documents(documents)\n",

View File

@@ -65,7 +65,7 @@ def tutorial7_rag_generator():
)
# Delete existing documents in documents store
document_store.delete_all_documents()
document_store.delete_documents()
# Write documents to document store
document_store.write_documents(documents)
# Add documents embeddings to index