import os

from lightrag import LightRAG, QueryParam
from lightrag.llm.ollama import ollama_model_complete, ollama_embed
from lightrag.utils import EmbeddingFunc

# Working directory for LightRAG's local cache and metadata
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
WORKING_DIR = os.path.join(ROOT_DIR, "myKG")
if not os.path.exists(WORKING_DIR):
    os.mkdir(WORKING_DIR)
print(f"WorkingDir: {WORKING_DIR}")

# MongoDB (key-value storage)
os.environ["MONGO_URI"] = "mongodb://root:root@localhost:27017/"
os.environ["MONGO_DATABASE"] = "LightRAG"

# Neo4j (graph storage)
BATCH_SIZE_NODES = 500
BATCH_SIZE_EDGES = 100
os.environ["NEO4J_URI"] = "bolt://localhost:7687"
os.environ["NEO4J_USERNAME"] = "neo4j"
os.environ["NEO4J_PASSWORD"] = "neo4j"

# Milvus (vector storage)
os.environ["MILVUS_URI"] = "http://localhost:19530"
os.environ["MILVUS_USER"] = "root"
os.environ["MILVUS_PASSWORD"] = "root"
os.environ["MILVUS_DB_NAME"] = "lightrag"

rag = LightRAG(
    working_dir=WORKING_DIR,
    # LLM served by a local Ollama instance
    llm_model_func=ollama_model_complete,
    llm_model_name="qwen2.5:14b",
    llm_model_max_async=4,
    llm_model_max_token_size=32768,
    llm_model_kwargs={
        "host": "http://127.0.0.1:11434",
        "options": {"num_ctx": 32768},
    },
    # Embeddings served by the same Ollama instance
    embedding_func=EmbeddingFunc(
        embedding_dim=1024,
        max_token_size=8192,
        func=lambda texts: ollama_embed(
            texts=texts, embed_model="bge-m3:latest", host="http://127.0.0.1:11434"
        ),
    ),
    # Storage backends configured via the environment variables above
    kv_storage="MongoKVStorage",
    graph_storage="Neo4JStorage",
    vector_storage="MilvusVectorDBStorage",
)

# Index a document, then run a hybrid-mode query against the resulting knowledge graph
file = "./book.txt"
with open(file, "r", encoding="utf-8") as f:
    rag.insert(f.read())

print(
    rag.query("What are the top themes in this story?", param=QueryParam(mode="hybrid"))
)
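
# Besides "hybrid", LightRAG's QueryParam also accepts the "naive", "local", and
# "global" retrieval modes. The loop below is an optional sketch (not part of the
# original script) that asks the same question under each mode so the answers can
# be compared.
for mode in ["naive", "local", "global", "hybrid"]:
    print(f"--- mode: {mode} ---")
    print(
        rag.query(
            "What are the top themes in this story?",
            param=QueryParam(mode=mode),
        )
    )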