"""
LightRAG meets Amazon Bedrock ⛰️
"""

import os
import logging

from lightrag import LightRAG, QueryParam
from lightrag.llm.bedrock import bedrock_complete, bedrock_embed
from lightrag.utils import EmbeddingFunc
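
# Raise the aiobotocore logger to WARNING so Bedrock request logs don't flood the output.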
logging.getLogger("aiobotocore").setLevel(logging.WARNING)
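
# LightRAG persists its index data (caches, vectors, graph) under this directory.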
WORKING_DIR = "./dickens"

if not os.path.exists(WORKING_DIR):
    os.mkdir(WORKING_DIR)
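
# Route both completions (Claude 3 Haiku) and 1024-dimensional embeddings
# through Amazon Bedrock. AWS credentials and region are assumed to be
# available via the usual environment variables or shared config.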
rag = LightRAG(
    working_dir=WORKING_DIR,
    llm_model_func=bedrock_complete,
    llm_model_name="Anthropic Claude 3 Haiku // Amazon Bedrock",
    embedding_func=EmbeddingFunc(
        embedding_dim=1024, max_token_size=8192, func=bedrock_embed
    ),
)
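
# Ingest the source document: insert() chunks, embeds, and indexes the text.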
with open("./book.txt", "r", encoding="utf-8") as f:
    rag.insert(f.read())
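
# Run the same question through each retrieval mode and print the answers.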
for mode in ["naive", "local", "global", "hybrid"]:
    print("\n+-" + "-" * len(mode) + "-+")
    print(f"| {mode.capitalize()} |")
    print("+-" + "-" * len(mode) + "-+\n")
    print(
        rag.query("What are the top themes in this story?", param=QueryParam(mode=mode))
    )