Mirror of https://github.com/deepset-ai/haystack.git (synced 2025-11-01 02:09:39 +00:00)
Add env var CONCURRENT_REQUEST_PER_WORKER (#1235)
* We add an env var `CONCURRENT_REQUEST_PER_WORKER`, following your naming convention (I went back a few commits to find the original name).
* It defaults to 4.
This commit is contained in:
parent 2c964db62d
commit 73a4f9825a
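For context, a minimal usage sketch (not part of the commit; the module path is taken from the imports in the diff below): the value is read from the process environment when rest_api.config is imported, so it has to be set beforehand.

import os

# Hypothetical override: must be in the environment before rest_api.config is imported.
os.environ.setdefault("CONCURRENT_REQUEST_PER_WORKER", "8")

from rest_api.config import CONCURRENT_REQUEST_PER_WORKER
print(CONCURRENT_REQUEST_PER_WORKER)  # "8" here; 4 (the int default) when the variable is unset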
@@ -8,3 +8,5 @@ FILE_UPLOAD_PATH = os.getenv("FILE_UPLOAD_PATH", "./file-upload")
 
 LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
 ROOT_PATH = os.getenv("ROOT_PATH", "/")
+
+CONCURRENT_REQUEST_PER_WORKER = os.getenv("CONCURRENT_REQUEST_PER_WORKER", 4)
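One caveat about the line added above: os.getenv returns a string whenever the variable is actually set in the environment, and the int 4 only when it is not. A hedged variant that keeps the type consistent (an assumption, not what this commit does) would be:

import os

# Cast so the value is an int whether it comes from the environment or from the default.
CONCURRENT_REQUEST_PER_WORKER = int(os.getenv("CONCURRENT_REQUEST_PER_WORKER", 4))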
@@ -8,7 +8,7 @@ from fastapi import APIRouter
 from pydantic import BaseModel
 
 from haystack import Pipeline
-from rest_api.config import PIPELINE_YAML_PATH, LOG_LEVEL, QUERY_PIPELINE_NAME
+from rest_api.config import PIPELINE_YAML_PATH, LOG_LEVEL, QUERY_PIPELINE_NAME, CONCURRENT_REQUEST_PER_WORKER
 from rest_api.controller.utils import RequestLimiter
 
 logging.getLogger("haystack").setLevel(LOG_LEVEL)
@@ -46,7 +46,7 @@ class Response(BaseModel):
 
 PIPELINE = Pipeline.load_from_yaml(Path(PIPELINE_YAML_PATH), pipeline_name=QUERY_PIPELINE_NAME)
 logger.info(f"Loaded pipeline nodes: {PIPELINE.graph.nodes.keys()}")
-concurrency_limiter = RequestLimiter(4)
+concurrency_limiter = RequestLimiter(CONCURRENT_REQUEST_PER_WORKER)
 
 
 @router.post("/query", response_model=Response)
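rest_api.controller.utils.RequestLimiter itself is not part of this diff; as a rough sketch of the behaviour the query endpoint relies on (a per-worker bounded-semaphore guard, details assumed, not the actual implementation), it could look like this:

import threading
from contextlib import contextmanager

from fastapi import HTTPException


class RequestLimiter:
    """Sketch of a per-worker concurrency guard; the real implementation may differ."""

    def __init__(self, limit: int):
        self.semaphore = threading.BoundedSemaphore(limit)

    @contextmanager
    def run(self):
        # Reject the request immediately instead of queueing it when the limit is reached.
        if not self.semaphore.acquire(blocking=False):
            raise HTTPException(status_code=503, detail="The server is busy processing requests.")
        try:
            yield
        finally:
            self.semaphore.release()

With a contract like this, the endpoint would presumably wrap pipeline execution in `with concurrency_limiter.run():`, so surplus requests get a 503 instead of piling up behind the worker.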