Mirror of https://github.com/deepset-ai/haystack.git (synced 2025-07-18 14:31:49 +00:00)

* Adding dummy generator implementation
* Adding tutorial to try the model
* Committing current non-working code
* Committing current update where we need to call the generate function directly and convert the embedding to a tensor
* Addressing review comments
* Refactoring finder and implementing rag_generator class
* Refined the implementation of RAGGenerator; it is now in clean shape
* Renaming RAGGenerator to RAGenerator
* Reverting change from finder.py and addressing review comments
* Removing support for RagSequenceForGeneration
* Utilizing embed_passage function from DensePassageRetriever
* Adding sample test data to verify generator output
* Updating testing script
* Updating testing script
* Fixing bug related to top_k
* Updating to latest FARM dependency
* Commenting out FARM dependency
* Reverting changes from TransformersReader
* Adding transformers dataset to compare transformers and Haystack generator implementations
* Using generator_encoder instead of question_encoder to generate context_input_ids
* Adding workaround to install FARM dependency from master branch
* Removing unnecessary changes
* Fixing generator test
* Removing transformers datasets
* Fixing generator test
* Some cleanup and updating TODO comments
* Adding tutorial notebook
* Updating tutorials with comments
* Explicitly passing token model in RAG test
* Addressing review comments
* Fixing notebook
* Refactoring tests to reduce memory footprint
* Splitting generator tests into a separate CI step and, before running it, reclaiming memory by terminating containers
* Moving Tika-dependent test to a separate dir
* Removing unwanted code
* Bringing reader under session scope
* FARM is now a session object, hence restoring changes from the default value
* Updating assert for PDF converter
* Dummy commit to trigger CI flow
* Reducing memory footprint required for generator tests
* Fixing mypy issues
* Marking tests with tika and elasticsearch markers; reverting changes in CI and pytest splits
* Reducing changes
* Fixing CI
* Changing Elasticsearch CI
* Fixing test error
* Disabling return of embeddings
* Marking generator test as well
* Refactoring tutorials
* Increasing ES memory to 750M
* Trying another fix for ES CI
* Reverting CI changes
* Splitting tests in CI
* Generator and non-generator markers split
* Adding pytest.ini to add markers and enable the strict-markers option (see the sketch after this list)
* Reducing Elasticsearch container memory
* Simplifying generator test by using documents with embeddings directly
* Bumping FARM up to 0.5.0
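The pytest.ini referred to in the commit notes is not shown on this page. Below is a minimal sketch of what such a file could look like, assuming the marker names that appear in the notes and in the test file further down (slow, elasticsearch, tika, generator); the marker descriptions are illustrative, not the repository's actual text.

[pytest]
addopts = --strict-markers
markers =
    slow: tests that take a long time to run
    elasticsearch: tests that need a running Elasticsearch instance
    tika: tests that need a running Tika server
    generator: tests that exercise the RAG generator

With --strict-markers enabled, any test using a marker that is not registered here fails collection, which keeps the marker-based CI splits described above honest.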
61 lines · 2.1 KiB · Python
import pytest
from fastapi.testclient import TestClient

from haystack import Finder
from haystack.retriever.sparse import ElasticsearchRetriever

# TODO: Add integration tests for other APIs


def get_test_client_and_override_dependencies(reader, document_store_with_docs):
    from rest_api.application import app
    from rest_api.controller import search

    # Swap the REST API's module-level dependencies for the test fixtures
    search.document_store = document_store_with_docs
    search.retriever = ElasticsearchRetriever(document_store=document_store_with_docs)
    search.FINDERS = {1: Finder(reader=reader, retriever=search.retriever)}

    return TestClient(app)


@pytest.mark.slow
@pytest.mark.elasticsearch
@pytest.mark.parametrize("document_store_with_docs", ["elasticsearch"], indirect=True)
@pytest.mark.parametrize("reader", ["farm"], indirect=True)
def test_query_api(reader, document_store_with_docs):
    client = get_test_client_and_override_dependencies(reader, document_store_with_docs)

    # Elasticsearch-style request body: full-text match on the question,
    # filtered down to the document named "filename2"
    query = {
        "size": 1,
        "query": {
            "bool": {
                "should": [
                    {
                        "multi_match": {
                            "query": "Where Paul lives?"
                        }
                    }
                ],
                "filter": [
                    {
                        "terms": {
                            "name": "filename2"
                        }
                    }
                ]
            }
        }
    }

    response = client.post(url="/models/1/query?top_k_reader=1", json=query)
    assert 200 == response.status_code

    # The API returns an Elasticsearch-like response with the extracted answer in _source
    response_json = response.json()
    assert 1 == response_json['hits']['total']['value']
    assert 1 == len(response_json['hits']['hits'])
    assert response_json['hits']['hits'][0]["_score"] is not None
    assert response_json['hits']['hits'][0]["_source"]["meta"] is not None
    assert response_json['hits']['hits'][0]["_id"] is not None
    assert "New York" == response_json['hits']['hits'][0]["_source"]["answer"]
    assert "My name is Paul and I live in New York" == response_json['hits']['hits'][0]["_source"]["context"]
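The reader and document_store_with_docs fixtures used above are supplied by indirect parametrization: with indirect=True, pytest passes the string parameter ("farm", "elasticsearch") to the fixture through request.param instead of to the test directly. The actual fixtures live in the repository's conftest.py, which is not shown here; the sketch below only illustrates how such fixtures could be wired up for this test, and the import paths, model name, and document contents are assumptions rather than the real fixture code.

# conftest.py (hypothetical sketch, not the repository's actual fixtures)
import pytest


@pytest.fixture
def document_store_with_docs(request):
    # request.param == "elasticsearch" because of indirect=True in the test above
    from haystack.document_store.elasticsearch import ElasticsearchDocumentStore  # assumed import path

    document_store = ElasticsearchDocumentStore(index="test")
    # Write the document the test's filter and assertions rely on
    document_store.write_documents(
        [{"text": "My name is Paul and I live in New York", "meta": {"name": "filename2"}}]
    )
    yield document_store
    document_store.delete_all_documents(index="test")


@pytest.fixture
def reader(request):
    # request.param == "farm" because of indirect=True in the test above
    from haystack.reader.farm import FARMReader  # assumed import path

    return FARMReader(model_name_or_path="deepset/roberta-base-squad2")  # assumed model

With the markers registered in pytest.ini, this test can then be selected or deselected via pytest's -m option, for example restricting the slow and elasticsearch markers to CI machines that have an Elasticsearch container running.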