2020-08-06 04:36:56 -04:00
|
|
|
import json
|
2020-04-15 14:04:30 +02:00
|
|
|
import logging
|
2020-07-07 12:28:41 +02:00
|
|
|
import time
|
2021-04-07 17:53:32 +02:00
|
|
|
from pathlib import Path
|
|
|
|
from typing import Dict, List, Optional, Union
|
2020-04-15 14:04:30 +02:00
|
|
|
|
|
|
|
from fastapi import APIRouter
|
2021-04-07 17:53:32 +02:00
|
|
|
from pydantic import BaseModel
|
2020-10-15 18:41:36 +02:00
|
|
|
|
2021-04-07 17:53:32 +02:00
|
|
|
from haystack import Pipeline
|
|
|
|
from rest_api.config import PIPELINE_YAML_PATH, LOG_LEVEL, QUERY_PIPELINE_NAME
|
2020-06-22 12:07:12 +02:00
|
|
|
from rest_api.controller.utils import RequestLimiter
|
2020-04-15 14:04:30 +02:00
|
|
|
|
2021-04-07 17:53:32 +02:00
|
|
|
# Raise the "haystack" library logger to the level configured for the REST
# API, and reuse that same logger object for this module's own messages.
logger = logging.getLogger("haystack")
logger.setLevel(LOG_LEVEL)

# Router collecting the search endpoints; mounted by the application factory.
router = APIRouter()
|
|
|
|
|
2021-04-07 17:53:32 +02:00
|
|
|
|
|
|
|
class Request(BaseModel):
    """Payload of a POST /query call.

    NOTE(review): field semantics below are inferred from how
    `_process_request` forwards them to the pipeline — confirm against
    the pipeline components' parameters.
    """

    # Free-text query to run through the pipeline.
    query: str
    # Optional metadata filters. A value may be a single string, a list of
    # strings, or None; None-valued entries are dropped and scalars are
    # wrapped in a list before being passed to the pipeline.
    filters: Optional[Dict[str, Optional[Union[str, List[str]]]]] = None
    # Forwarded as `top_k_retriever` to `pipeline.run` (presumably caps the
    # number of retrieved documents).
    top_k_retriever: Optional[int] = None
    # Forwarded as `top_k_reader` to `pipeline.run` (presumably caps the
    # number of extracted answers).
    top_k_reader: Optional[int] = None
|
|
|
|
|
2021-04-07 17:53:32 +02:00
|
|
|
|
|
|
|
|
|
|
|
class Answer(BaseModel):
    """One answer span in a query response.

    NOTE(review): field meanings are inferred from the field names and the
    `Response` model; verify against the pipeline output schema.
    """

    # Extracted answer text; None when no answer was found.
    answer: Optional[str]
    # The question this answer responds to.
    question: Optional[str]
    # Model-assigned relevance score.
    score: Optional[float] = None
    # Probability-like confidence value.
    probability: Optional[float] = None
    # Text surrounding the answer span.
    context: Optional[str]
    # Character offsets of the answer within `context`.
    offset_start: int
    offset_end: int
    # Character offsets of the answer within the source document.
    offset_start_in_doc: Optional[int]
    offset_end_in_doc: Optional[int]
    # Identifier of the document the answer came from.
    document_id: Optional[str] = None
    # String metadata attached to the source document.
    meta: Optional[Dict[str, str]]
|
|
|
|
|
|
|
|
|
|
|
|
class Response(BaseModel):
    """Payload returned by POST /query: the query and its answers."""

    # The original query string.
    query: str
    # Answers produced by the pipeline (ordering assumed best-first —
    # TODO confirm against the pipeline output).
    answers: List[Answer]
|
|
|
|
|
|
|
|
|
|
|
|
# Load the query pipeline once, at import time, from the configured YAML
# definition; the same instance is shared by every request.
PIPELINE = Pipeline.load_from_yaml(
    Path(PIPELINE_YAML_PATH),
    pipeline_name=QUERY_PIPELINE_NAME,
)
logger.info(f"Loaded pipeline nodes: {PIPELINE.graph.nodes.keys()}")

# Cap the number of /query requests executing concurrently at four.
concurrency_limiter = RequestLimiter(4)
|
|
|
|
|
|
|
|
|
|
|
|
@router.post("/query", response_model=Response)
def query(request: Request):
    """Run the shared query pipeline on `request`, throttled by the limiter."""
    with concurrency_limiter.run():
        return _process_request(PIPELINE, request)
|
|
|
|
|
|
|
|
|
|
|
|
def _process_request(pipeline, request) -> Response:
|
|
|
|
start_time = time.time()
|
|
|
|
|
|
|
|
filters = {}
|
|
|
|
if request.filters:
|
|
|
|
# put filter values into a list and remove filters with null value
|
|
|
|
for key, values in request.filters.items():
|
|
|
|
if values is None:
|
|
|
|
continue
|
|
|
|
if not isinstance(values, list):
|
|
|
|
values = [values]
|
|
|
|
filters[key] = values
|
|
|
|
|
2021-06-24 09:53:08 +02:00
|
|
|
result = pipeline.run(query=request.query,
|
|
|
|
filters=filters,
|
|
|
|
top_k_retriever=request.top_k_retriever,
|
|
|
|
top_k_reader=request.top_k_reader)
|
2021-04-07 17:53:32 +02:00
|
|
|
|
2020-10-16 13:25:31 +02:00
|
|
|
end_time = time.time()
|
2021-04-07 17:53:32 +02:00
|
|
|
logger.info(json.dumps({"request": request.dict(), "response": result, "time": f"{(end_time - start_time):.2f}"}))
|
|
|
|
|
|
|
|
return result
|