Mirror of https://github.com/Azure-Samples/graphrag-accelerator.git (synced 2025-06-27 04:39:57 +00:00)
Add model encodings download to backend docker image build (#207)
Co-authored-by: Josh Bradley <joshbradley@microsoft.com>
commit 3330134c0b
parent 6dc1ad436f
backend/poetry.lock (generated): 2610 lines changed; diff suppressed because it is too large.
@@ -44,6 +44,7 @@ azure-storage-blob = ">=12.19.0"
 datashaper = ">=0.0.46"
 environs = ">=9.5.0"
 fastapi = ">=0.110.0"
+fastapi-offline = ">=1.7.3"
 fastparquet = ">=2023.10.1"
 fsspec = ">=2024.2.0"
 graphrag = "==0.3.3"
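The only substantive change in this dependency block is the new fastapi-offline constraint; the large backend/poetry.lock diff above is the regenerated lockfile that goes with it. As a rough sketch of what the package provides (based on the fastapi-offline project's documented behavior, not on code in this commit), FastAPIOffline is a drop-in subclass of FastAPI that serves the Swagger UI and ReDoc assets bundled with the package, so interactive docs keep working in network-restricted environments:

    # Illustrative only; the route and title below are made up for this sketch.
    from fastapi_offline import FastAPIOffline

    app = FastAPIOffline(title="offline-docs-demo")

    @app.get("/health")
    def health() -> dict:
        # /docs and /redoc load their JS/CSS from the installed package, not a CDN.
        return {"status": "ok"}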
@@ -13,6 +13,7 @@ from fastapi import (
 )
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.responses import Response
+from fastapi_offline import FastAPIOffline
 from kubernetes import (
     client,
     config,
@@ -89,9 +90,10 @@ async def lifespan(app: FastAPI):
     # shutdown/garbage collection code goes here


-app = FastAPI(
+app = FastAPIOffline(
     docs_url="/manpage/docs",
     openapi_url="/manpage/openapi.json",
     root_path=os.getenv("API_ROOT_PATH", ""),
     title="GraphRAG",
     version=os.getenv("GRAPHRAG_VERSION", "undefined_version"),
     lifespan=lifespan,
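A quick way to sanity-check this application change (a hypothetical smoke test, not part of the commit; it assumes httpx is installed so FastAPI's TestClient can be used) is to confirm the documented routes respond without any outbound requests:

    # Hypothetical smoke test for the offline docs configuration shown above.
    from fastapi.testclient import TestClient
    from fastapi_offline import FastAPIOffline

    app = FastAPIOffline(
        docs_url="/manpage/docs",
        openapi_url="/manpage/openapi.json",
    )

    client = TestClient(app)
    assert client.get("/manpage/openapi.json").status_code == 200
    assert client.get("/manpage/docs").status_code == 200  # Swagger UI from local assets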
@@ -10,6 +10,7 @@ ENV PIP_ROOT_USER_ACTION=ignore
 ENV PIP_DISABLE_PIP_VERSION_CHECK=1
 ENV SETUPTOOLS_USE_DISTUTILS=stdlib
 ENV PYTHONPATH=/backend
+ENV TIKTOKEN_CACHE_DIR=/opt/tiktoken_cache/

 COPY backend /backend
 RUN cd backend \
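TIKTOKEN_CACHE_DIR is the cache location tiktoken consults before downloading encoding files, so pointing it at a directory baked into the image lets the container tokenize without outbound network access at runtime. A rough illustration of that behavior (a sketch based on tiktoken's caching logic, not code from this repo):

    import os

    os.environ["TIKTOKEN_CACHE_DIR"] = "/opt/tiktoken_cache/"  # same path as the ENV above

    import tiktoken

    enc = tiktoken.encoding_for_model("gpt-4o")
    print(enc.name, len(enc.encode("hello world")))

    # Cached encoding files are stored under names derived from a hash of their
    # source URL; after a warm image build this listing should be non-empty.
    print(os.listdir(os.environ["TIKTOKEN_CACHE_DIR"]))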
@@ -20,6 +21,9 @@ RUN cd backend \
 # download all nltk data that graphrag requires
 RUN python -c "import nltk;nltk.download(['punkt','averaged_perceptron_tagger','maxent_ne_chunker','words','wordnet'])"

+# download tiktoken model encodings
+RUN python -c "import tiktoken; tiktoken.encoding_for_model('gpt-3.5-turbo'); tiktoken.encoding_for_model('gpt-4'); tiktoken.encoding_for_model('gpt-4o');"
+
 WORKDIR /backend
 EXPOSE 80
 CMD ["uvicorn", "src.main:app", "--host", "0.0.0.0", "--port", "80"]
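In recent tiktoken releases the three models warmed up by that RUN step resolve to just two encoding files (cl100k_base and o200k_base), so the build only fetches those two blobs into the cache. A minimal check, illustrative rather than part of the image build:

    import tiktoken

    for model in ("gpt-3.5-turbo", "gpt-4", "gpt-4o"):
        # gpt-3.5-turbo and gpt-4 share cl100k_base; gpt-4o uses o200k_base.
        print(model, "->", tiktoken.encoding_for_model(model).name)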