2021-10-04 11:21:00 +02:00
|
|
|
import os
|
2021-04-07 17:53:32 +02:00
|
|
|
from pathlib import Path
|
|
|
|
|
2020-10-16 13:25:31 +02:00
|
|
|
import pytest
|
|
|
|
from fastapi.testclient import TestClient
|
|
|
|
|
2021-10-04 11:21:00 +02:00
|
|
|
from rest_api.application import app
|
2020-10-16 13:25:31 +02:00
|
|
|
|
2021-10-04 11:21:00 +02:00
|
|
|
|
2021-10-12 10:53:54 +02:00
|
|
|
@pytest.fixture
def client() -> TestClient:
    """Yield a TestClient wired to the test pipeline; wipe all documents on teardown."""
    samples_dir = Path(__file__).parent / "samples"
    os.environ["PIPELINE_YAML_PATH"] = str((samples_dir / "pipeline" / "test_pipeline.yaml").absolute())
    os.environ["INDEXING_PIPELINE_NAME"] = "indexing_text_pipeline"

    test_client = TestClient(app)
    yield test_client

    # Clean up
    test_client.post(url="/documents/delete_by_filters", data='{"filters": {}}')
2021-10-12 10:53:54 +02:00
|
|
|
@pytest.fixture
def populated_client(client: TestClient) -> TestClient:
    """Yield a client whose document store holds the two sample PDFs.

    Starts from an empty store, uploads ``sample_pdf_1.pdf`` and
    ``sample_pdf_2.pdf`` (with ``meta_key``/``meta_index`` metadata), yields the
    client, then wipes the store again on teardown.
    """
    # Start from an empty store so document counts in the tests are deterministic
    client.post(url="/documents/delete_by_filters", data='{"filters": {}}')

    pdf_dir = Path(__file__).parent / "samples" / "pdf"
    for index, pdf_name in enumerate(["sample_pdf_1.pdf", "sample_pdf_2.pdf"]):
        # Open inside `with` so each handle is closed right after its upload
        # (the original opened the files and never closed them)
        with (pdf_dir / pdf_name).open("rb") as pdf_file:
            response = client.post(
                url="/file-upload",
                files={"files": pdf_file},
                data={"meta": f'{{"meta_key": "meta_value", "meta_index": "{index}"}}'},
            )
        assert 200 == response.status_code

    yield client

    # Clean up
    client.post(url="/documents/delete_by_filters", data='{"filters": {}}')
def test_get_documents():
    """Documents uploaded via /file-upload are retrievable via /documents/get_by_filters."""
    os.environ["PIPELINE_YAML_PATH"] = str((Path(__file__).parent / "samples" / "pipeline" / "test_pipeline.yaml").absolute())
    os.environ["INDEXING_PIPELINE_NAME"] = "indexing_text_pipeline"
    client = TestClient(app)

    # Clean up to make sure the docstore is empty
    client.post(url="/documents/delete_by_filters", data='{"filters": {}}')

    # Upload the files; open inside `with` so each handle is closed after its
    # upload (the original opened the files and never closed them)
    docs_dir = Path(__file__).parent / "samples" / "docs"
    for doc_name in ["doc_1.txt", "doc_2.txt"]:
        with (docs_dir / doc_name).open("rb") as doc_file:
            response = client.post(
                url="/file-upload",
                files={"files": doc_file},
                # plain literal: the original's f-prefix had no placeholders
                data={"meta": '{"meta_key": "meta_value_get"}'},
            )
        assert 200 == response.status_code

    # Get the documents
    response = client.post(url="/documents/get_by_filters", data='{"filters": {"meta_key": ["meta_value_get"]}}')
    assert 200 == response.status_code
    response_json = response.json()

    # Make sure the right docs are found
    assert len(response_json) == 2
    names = [doc["meta"]["name"] for doc in response_json]
    assert "doc_1.txt" in names
    assert "doc_2.txt" in names
    meta_keys = [doc["meta"]["meta_key"] for doc in response_json]
    assert all("meta_value_get" == meta_key for meta_key in meta_keys)
def test_delete_documents():
    """Deleting by a meta filter removes exactly the matching document."""
    os.environ["PIPELINE_YAML_PATH"] = str((Path(__file__).parent / "samples" / "pipeline" / "test_pipeline.yaml").absolute())
    os.environ["INDEXING_PIPELINE_NAME"] = "indexing_text_pipeline"
    client = TestClient(app)

    # Clean up to make sure the docstore is empty
    client.post(url="/documents/delete_by_filters", data='{"filters": {}}')

    # Upload the files; open inside `with` so each handle is closed after its
    # upload (the original opened the files and never closed them)
    docs_dir = Path(__file__).parent / "samples" / "docs"
    for index, doc_name in enumerate(["doc_1.txt", "doc_2.txt"]):
        with (docs_dir / doc_name).open("rb") as doc_file:
            response = client.post(
                url="/file-upload",
                files={"files": doc_file},
                data={"meta": f'{{"meta_key": "meta_value_del", "meta_index": "{index}"}}'},
            )
        assert 200 == response.status_code

    # Make sure there are two docs
    response = client.post(url="/documents/get_by_filters", data='{"filters": {"meta_key": ["meta_value_del"]}}')
    assert 200 == response.status_code
    response_json = response.json()
    assert len(response_json) == 2

    # Delete one doc
    response = client.post(url="/documents/delete_by_filters", data='{"filters": {"meta_index": ["0"]}}')
    assert 200 == response.status_code

    # Now there should be only one doc
    response = client.post(url="/documents/get_by_filters", data='{"filters": {"meta_key": ["meta_value_del"]}}')
    assert 200 == response.status_code
    response_json = response.json()
    assert len(response_json) == 1

    # Make sure the right doc was deleted: index "0" is gone, index "1" remains
    response = client.post(url="/documents/get_by_filters", data='{"filters": {"meta_index": ["0"]}}')
    assert 200 == response.status_code
    response_json = response.json()
    assert len(response_json) == 0

    response = client.post(url="/documents/get_by_filters", data='{"filters": {"meta_index": ["1"]}}')
    assert 200 == response.status_code
    response_json = response.json()
    assert len(response_json) == 1
def test_file_upload(client: TestClient):
    """A sample PDF posted to /file-upload with JSON meta is accepted with HTTP 200."""
    pdf_path = Path(__file__).parent / "samples" / "pdf" / "sample_pdf_1.pdf"
    # Open inside `with` so the handle is closed after the request
    # (the original opened the file and never closed it)
    with pdf_path.open("rb") as pdf_file:
        response = client.post(url="/file-upload", files={"files": pdf_file}, data={"meta": '{"meta_key": "meta_value"}'})
    assert 200 == response.status_code

    # Remove the uploaded document so later tests start from an empty store
    client.post(url="/documents/delete_by_filters", data='{"filters": {}}')
def test_query_with_no_filter(populated_client: TestClient):
    """An unfiltered query over the sample PDFs returns the expected top answer."""
    payload = {"query": "Who made the PDF specification?"}
    response = populated_client.post(url="/query", json=payload)
    assert 200 == response.status_code
    top_answer = response.json()["answers"][0]["answer"]
    assert top_answer == "Adobe Systems"
def test_query_with_one_filter(populated_client: TestClient):
    """A query with a single scalar meta filter still finds the expected answer."""
    payload = {
        "query": "Who made the PDF specification?",
        "params": {"filters": {"meta_key": "meta_value"}},
    }
    response = populated_client.post(url="/query", json=payload)
    assert 200 == response.status_code
    top_answer = response.json()["answers"][0]["answer"]
    assert top_answer == "Adobe Systems"
def test_query_with_filter_list(populated_client: TestClient):
    """A query whose meta filter is a list of accepted values finds the expected answer."""
    payload = {
        "query": "Who made the PDF specification?",
        "params": {"filters": {"meta_key": ["meta_value", "another_value"]}}
    }
    response = populated_client.post(url="/query", json=payload)
    assert 200 == response.status_code
    top_answer = response.json()["answers"][0]["answer"]
    assert top_answer == "Adobe Systems"
def test_query_with_invalid_filter(populated_client: TestClient):
    """A filter matching no documents yields HTTP 200 with an empty answers list."""
    payload = {
        "query": "Who made the PDF specification?", "params": {"filters": {"meta_key": "invalid_value"}}
    }
    response = populated_client.post(url="/query", json=payload)
    assert 200 == response.status_code
    answers = response.json()["answers"]
    assert len(answers) == 0
def test_write_feedback(populated_client: TestClient):
    """Feedback referencing a real document id is accepted by /feedback."""
    # Run a query first so the feedback can point at an actual document
    query_response = populated_client.post(url="/query", json={"query": "Who made the PDF specification?"})
    document_id = query_response.json()["answers"][0]["document_id"]

    feedback = {
        "question": "Who made the PDF specification?",
        "is_correct_answer": True,
        "document_id": document_id,
        "is_correct_document": True,
        "answer": "Adobe Systems",
        "offset_start_in_doc": 60,
    }
    response = populated_client.post(url="/feedback", json=feedback)
    assert 200 == response.status_code
def test_export_feedback(populated_client: TestClient):
    """Exported feedback places the answer text at answer_start within its context."""
    # Run a query first so the feedback can point at an actual document
    query_response = populated_client.post(url="/query", json={"query": "Who made the PDF specification?"})
    document_id = query_response.json()["answers"][0]["document_id"]

    feedback = {
        "question": "Who made the PDF specification?",
        "is_correct_answer": True,
        "document_id": document_id,
        "is_correct_document": True,
        "answer": "Adobe Systems",
        "offset_start_in_doc": 60,
    }

    feedback_urls = [
        "/export-feedback?full_document_context=true",
        "/export-feedback?full_document_context=false&context_size=50",
        "/export-feedback?full_document_context=false&context_size=50000",
    ]
    for url in feedback_urls:
        export_json = populated_client.get(url=url, json=feedback).json()
        first_paragraph = export_json["data"][0]["paragraphs"][0]
        context = first_paragraph["context"]
        first_answer = first_paragraph["qas"][0]["answers"][0]
        answer_start = first_answer["answer_start"]
        answer = first_answer["text"]
        # The reported span must reproduce the answer text exactly
        assert context[answer_start:answer_start + len(answer)] == answer