import json
import logging
import time
from pathlib import Path
from typing import Any, Dict, List, Optional, Union

from fastapi import APIRouter
from pydantic import BaseModel

from haystack import Pipeline

from rest_api.config import PIPELINE_YAML_PATH, LOG_LEVEL, QUERY_PIPELINE_NAME, CONCURRENT_REQUEST_PER_WORKER
from rest_api.controller.utils import RequestLimiter
|
|
# Align the "haystack" logger's verbosity with the REST API configuration.
logging.getLogger("haystack").setLevel(LOG_LEVEL)

# Module-level logger; intentionally shares the "haystack" logger so pipeline
# and API messages end up in the same stream.
logger = logging.getLogger("haystack")

# Router holding this module's endpoints; mounted by the application elsewhere.
router = APIRouter()
|
|
|
|
|
2021-04-07 17:53:32 +02:00
|
|
|
|
|
|
|
class Request(BaseModel):
    """Payload of a search request sent to the /query endpoint."""

    # Free-text query string forwarded to the pipeline.
    query: str
    # Optional per-request pipeline parameters (passed through to
    # Pipeline.run(); may contain a "filters" entry — see _process_request).
    params: Optional[dict] = None
|
2021-04-07 17:53:32 +02:00
|
|
|
|
|
|
|
|
|
|
|
class Answer(BaseModel):
    """One answer produced by the query pipeline.

    All fields are optional because different pipeline/node configurations
    populate different subsets of them.
    """

    # The extracted answer text.
    answer: Optional[str]
    # The original question/query this answer responds to.
    question: Optional[str]
    # Relevance score assigned by the pipeline.
    score: Optional[float] = None
    # Probability-like confidence value.
    probability: Optional[float] = None
    # Text snippet surrounding the answer.
    context: Optional[str]
    # Character offsets — presumably relative to `context`; confirm against
    # the pipeline's reader output.
    offset_start: Optional[int]
    offset_end: Optional[int]
    # Character offsets relative to the full source document.
    offset_start_in_doc: Optional[int]
    offset_end_in_doc: Optional[int]
    # ID of the document the answer was extracted from.
    document_id: Optional[str] = None
    # Arbitrary metadata attached to the source document.
    meta: Optional[Dict[str, Any]]
|
2021-04-07 17:53:32 +02:00
|
|
|
|
|
|
|
|
|
|
|
class Response(BaseModel):
    """Response model of the /query endpoint: the query plus its answers."""

    # The query string that was executed.
    query: str
    # Ranked list of answers returned by the pipeline.
    answers: List[Answer]
|
|
|
|
|
|
|
|
|
|
|
|
# Load the query pipeline once at import time from the configured YAML
# definition; every request reuses this shared instance.
PIPELINE = Pipeline.load_from_yaml(Path(PIPELINE_YAML_PATH), pipeline_name=QUERY_PIPELINE_NAME)
logger.info(f"Loaded pipeline nodes: {PIPELINE.graph.nodes.keys()}")

# Throttle: caps the number of requests processed concurrently per worker.
concurrency_limiter = RequestLimiter(CONCURRENT_REQUEST_PER_WORKER)
|
2021-04-07 17:53:32 +02:00
|
|
|
|
|
|
|
|
|
|
|
@router.post("/query", response_model=Response)
def query(request: Request):
    """Run the shared query pipeline on the incoming request.

    Execution is gated by the module-level concurrency limiter so a single
    worker never processes more than the configured number of requests
    at once.
    """
    with concurrency_limiter.run():
        return _process_request(PIPELINE, request)
|
|
|
|
|
|
|
|
|
|
|
|
def _process_request(pipeline, request) -> Response:
    """Execute the pipeline for one search request and log the outcome.

    :param pipeline: loaded Haystack ``Pipeline`` to run the query against.
    :param request: incoming :class:`Request` carrying the query string and
        optional pipeline ``params``.
    :return: raw pipeline output dict (validated against :class:`Response`
        by FastAPI when returned from the endpoint).
    """
    start_time = time.time()

    params = request.params or {}

    # Normalize filters: wrap scalar values into single-element lists and drop
    # filters whose value is null. The `or {}` guard ensures an explicit
    # `"filters": null` in the request does not crash the loop; the original
    # `if "filters" in params` check was redundant, as the key was always set
    # just before it.
    filters = {}
    for key, values in (params.get("filters") or {}).items():
        if values is None:
            continue
        if not isinstance(values, list):
            values = [values]
        filters[key] = values
    params["filters"] = filters

    result = pipeline.run(query=request.query, params=params)

    end_time = time.time()
    # Structured log entry: full request, full response, wall-clock seconds.
    logger.info({"request": request.dict(), "response": result, "time": f"{(end_time - start_time):.2f}"})

    return result
|