import os
import asyncio
from lightrag import LightRAG, QueryParam
from lightrag.llm.openai import openai_complete_if_cache, openai_embed
from lightrag.utils import EmbeddingFunc
import numpy as np

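# LightRAG persists its key-value stores, vector indexes, and graph data here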
WORKING_DIR = "./dickens"

if not os.path.exists(WORKING_DIR):
    os.mkdir(WORKING_DIR)


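# LLM wrapper: Upstage's Solar API is OpenAI-compatible, so the generic
# openai_complete_if_cache helper works once pointed at the Solar base_url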
async def llm_model_func(
    prompt, system_prompt=None, history_messages=[], keyword_extraction=False, **kwargs
) -> str:
    return await openai_complete_if_cache(
        "solar-mini",
        prompt,
        system_prompt=system_prompt,
        history_messages=history_messages,
        api_key=os.getenv("UPSTAGE_API_KEY"),
        base_url="https://api.upstage.ai/v1/solar",
        **kwargs,
    )


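# Embedding wrapper: batch-embed texts with Solar's embedding model through
# the same OpenAI-compatible endpoint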
async def embedding_func(texts: list[str]) -> np.ndarray:
    return await openai_embed(
        texts,
        model="solar-embedding-1-large-query",
        api_key=os.getenv("UPSTAGE_API_KEY"),
        base_url="https://api.upstage.ai/v1/solar",
    )


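# Probe the embedding endpoint once with a test sentence to detect the vector
# dimension, instead of hard-coding it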
async def get_embedding_dim():
    test_text = ["This is a test sentence."]
    embedding = await embedding_func(test_text)
    embedding_dim = embedding.shape[1]
    return embedding_dim


# Quick sanity test for the two wrappers above
async def test_funcs():
    result = await llm_model_func("How are you?")
    print("llm_model_func: ", result)

    result = await embedding_func(["How are you?"])
    print("embedding_func: ", result)


# asyncio.run(test_funcs())


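# End-to-end demo: detect the embedding dimension, build the RAG instance,
# index ./book.txt, then run the same question in each query mode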
async def main():
    try:
        embedding_dimension = await get_embedding_dim()
        print(f"Detected embedding dimension: {embedding_dimension}")

        rag = LightRAG(
            working_dir=WORKING_DIR,
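            # Cache LLM responses: a new query whose embedding is at least
            # 0.90-similar to a previously answered one reuses the cached answer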
            embedding_cache_config={
                "enabled": True,
                "similarity_threshold": 0.90,
            },
            llm_model_func=llm_model_func,
            embedding_func=EmbeddingFunc(
                embedding_dim=embedding_dimension,
                max_token_size=8192,
                func=embedding_func,
            ),
        )

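        # Ingest the book: LightRAG chunks the text, extracts entities and
        # relations with the LLM, and builds its graph and vector indexes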
        with open("./book.txt", "r", encoding="utf-8") as f:
            await rag.ainsert(f.read())

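        # Ask the same question under each retrieval mode: naive (plain chunk
        # vector search), local (entity-centric graph context), global
        # (relation-level context), and hybrid (local + global combined)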
        # Perform naive search
        print(
            await rag.aquery(
                "What are the top themes in this story?", param=QueryParam(mode="naive")
            )
        )

        # Perform local search
        print(
            await rag.aquery(
                "What are the top themes in this story?", param=QueryParam(mode="local")
            )
        )

        # Perform global search
        print(
            await rag.aquery(
                "What are the top themes in this story?",
                param=QueryParam(mode="global"),
            )
        )

        # Perform hybrid search
        print(
            await rag.aquery(
                "What are the top themes in this story?",
                param=QueryParam(mode="hybrid"),
            )
        )
    except Exception as e:
        print(f"An error occurred: {e}")


if __name__ == "__main__":
    asyncio.run(main())