LightRAG/examples/lightrag_openai_demo.py — OpenAI demo script (55 lines, 1.3 KiB, Python)

2024-10-15 19:40:08 +08:00
import os
2025-03-03 18:33:42 +08:00
import asyncio
2024-10-15 19:40:08 +08:00
from lightrag import LightRAG, QueryParam
2025-02-11 11:42:46 +08:00
from lightrag.llm.openai import gpt_4o_mini_complete, openai_embed
2025-03-03 18:33:42 +08:00
from lightrag.kg.shared_storage import initialize_pipeline_status
2024-10-15 19:40:08 +08:00
# Directory where LightRAG persists its index and storage files.
WORKING_DIR = "./dickens"

# os.makedirs(..., exist_ok=True) replaces the exists()+mkdir() pair:
# it avoids the check-then-create race and also creates any missing
# parent directories, while being a no-op when the directory exists.
os.makedirs(WORKING_DIR, exist_ok=True)
2025-03-03 18:33:42 +08:00
async def initialize_rag():
    """Construct a LightRAG instance and prepare its storages and pipeline.

    Returns the ready-to-use instance; must be awaited (or driven via
    asyncio.run) before any insert/query calls.
    """
    instance = LightRAG(
        working_dir=WORKING_DIR,
        llm_model_func=gpt_4o_mini_complete,  # swap in gpt_4o_complete for a stronger model
        embedding_func=openai_embed,
    )
    await instance.initialize_storages()
    await initialize_pipeline_status()
    return instance
def main():
    """Index ./book.txt into LightRAG and demo each query mode."""
    # Drive the async initialization from this synchronous entry point.
    rag = asyncio.run(initialize_rag())

    # Ingest the source document into the RAG index.
    with open("./book.txt", "r", encoding="utf-8") as f:
        rag.insert(f.read())

    # Ask the same question through every retrieval mode, printing each answer
    # in the same order as before: naive, local, global, hybrid.
    question = "What are the top themes in this story?"
    for mode in ("naive", "local", "global", "hybrid"):
        print(rag.query(question, param=QueryParam(mode=mode)))
2024-10-15 19:40:08 +08:00
2025-03-03 18:33:42 +08:00
# Script entry point: run the demo only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()
2024-10-15 19:40:08 +08:00