LightRAG/examples/test_neo4j.py

47 lines
1.2 KiB
Python
Raw Normal View History

2024-10-29 15:36:07 -04:00
"""Example: run LightRAG with Neo4j as the graph storage backend.

Ingests ./book.txt into a LightRAG instance backed by Neo4JStorage, then
runs the same question through all four query modes (naive, local, global,
hybrid) and prints each answer.
"""

import os

from lightrag import LightRAG, QueryParam
from lightrag.llm.openai import gpt_4o_mini_complete

#########
# Uncomment the below two lines if running in a jupyter notebook to handle
# the async nature of rag.insert()
# import nest_asyncio
# nest_asyncio.apply()
#########

# Directory where LightRAG persists its local stores (KV caches, vector data).
WORKING_DIR = "./local_neo4jWorkDir"

if not os.path.exists(WORKING_DIR):
    os.mkdir(WORKING_DIR)

rag = LightRAG(
    working_dir=WORKING_DIR,
    llm_model_func=gpt_4o_mini_complete,  # Use gpt_4o_mini_complete LLM model
    # Store the extracted knowledge graph in Neo4j instead of the default
    # local graph storage (connection details come from NEO4J_* env vars —
    # TODO confirm against the LightRAG Neo4JStorage docs).
    graph_storage="Neo4JStorage",
    log_level="INFO",
    # llm_model_func=gpt_4o_complete  # Optionally, use a stronger model
)

# Ingest the source document: LightRAG chunks it, extracts entities and
# relations via the LLM, and writes the resulting graph to Neo4j.
# Explicit encoding avoids platform-default decoding surprises.
with open("./book.txt", encoding="utf-8") as f:
    rag.insert(f.read())

# Perform naive search
print(
    rag.query("What are the top themes in this story?", param=QueryParam(mode="naive"))
)

# Perform local search
print(
    rag.query("What are the top themes in this story?", param=QueryParam(mode="local"))
)

# Perform global search
print(
    rag.query("What are the top themes in this story?", param=QueryParam(mode="global"))
)

# Perform hybrid search
print(
    rag.query("What are the top themes in this story?", param=QueryParam(mode="hybrid"))
)