LightRAG/examples/lightrag_multi_model_all_modes_demo.py

import os
import asyncio
from lightrag import LightRAG, QueryParam
from lightrag.llm.openai import gpt_4o_mini_complete, gpt_4o_complete, openai_embed
from lightrag.kg.shared_storage import initialize_pipeline_status
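# Note: the OpenAI helpers imported above go through the OpenAI SDK, so this
# demo assumes OPENAI_API_KEY is set in the environment before running.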
WORKING_DIR = "./lightrag_demo"

if not os.path.exists(WORKING_DIR):
    os.mkdir(WORKING_DIR)


async def initialize_rag():
    rag = LightRAG(
        working_dir=WORKING_DIR,
        embedding_func=openai_embed,
        llm_model_func=gpt_4o_mini_complete,  # Default model for queries
    )

    await rag.initialize_storages()
    await initialize_pipeline_status()

    return rag


def main():
    # Initialize RAG instance
    rag = asyncio.run(initialize_rag())

    # Load the data
    with open("./book.txt", "r", encoding="utf-8") as f:
        rag.insert(f.read())
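    # The same question is asked once per retrieval mode. As a rough guide
    # (see the LightRAG README for details): "naive" is plain vector search,
    # "local" and "global" retrieve from the knowledge graph at the entity and
    # relationship level respectively, "hybrid" combines local and global, and
    # "mix" blends knowledge-graph and vector retrieval.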
    # Query with naive mode (default model)
    print("--- NAIVE mode ---")
    print(
        rag.query(
            "What are the main themes in this story?", param=QueryParam(mode="naive")
        )
    )

    # Query with local mode (default model)
    print("\n--- LOCAL mode ---")
    print(
        rag.query(
            "What are the main themes in this story?", param=QueryParam(mode="local")
        )
    )

    # Query with global mode (default model)
    print("\n--- GLOBAL mode ---")
    print(
        rag.query(
            "What are the main themes in this story?", param=QueryParam(mode="global")
        )
    )

    # Query with hybrid mode (default model)
    print("\n--- HYBRID mode ---")
    print(
        rag.query(
            "What are the main themes in this story?", param=QueryParam(mode="hybrid")
        )
    )

    # Query with mix mode (default model)
    print("\n--- MIX mode ---")
    print(
        rag.query(
            "What are the main themes in this story?", param=QueryParam(mode="mix")
        )
    )

    # Query with a custom model (gpt-4o) for a more complex question
    print("\n--- Using custom model for complex analysis ---")
    print(
        rag.query(
            "How does the character development reflect Victorian-era attitudes?",
            param=QueryParam(
                mode="global",
                model_func=gpt_4o_complete,  # Override default model with more capable one
            ),
        )
    )


if __name__ == "__main__":
    main()