# LightRAG/lightrag/kg/milvus_impl.py
import asyncio
import os
from tqdm.asyncio import tqdm as tqdm_async
from dataclasses import dataclass
import numpy as np
from lightrag.utils import logger
from ..base import BaseVectorStorage
import pipmaster as pm
import configparser
# pymilvus is an optional dependency: install it lazily on first import
# of this backend instead of forcing it on every LightRAG user.
if not pm.is_installed("pymilvus"):
    pm.install("pymilvus")

from pymilvus import MilvusClient

# Connection settings may be provided in config.ini ([milvus] section);
# environment variables take precedence where both are set.
config = configparser.ConfigParser()
config.read("config.ini", "utf-8")
@dataclass
class MilvusVectorDBStorge(BaseVectorStorage):
    """Vector storage backend backed by Milvus.

    One Milvus collection per ``self.namespace``; vectors are produced by
    ``self.embedding_func`` and only payload fields listed in
    ``self.meta_fields`` are persisted alongside each vector.
    """

    @staticmethod
    def create_collection_if_not_exist(
        client: MilvusClient, collection_name: str, **kwargs
    ):
        """Create ``collection_name`` (string ids, max length 64) unless it exists."""
        if client.has_collection(collection_name):
            return
        client.create_collection(
            collection_name, max_length=64, id_type="string", **kwargs
        )

    def __post_init__(self):
        # Resolution order for every connection setting: environment
        # variable, then config.ini [milvus] section, then fallback.
        # The URI falls back to a local Milvus Lite db file in working_dir.
        self._client = MilvusClient(
            uri=os.environ.get(
                "MILVUS_URI",
                config.get(
                    "milvus",
                    "uri",
                    fallback=os.path.join(
                        self.global_config["working_dir"], "milvus_lite.db"
                    ),
                ),
            ),
            user=os.environ.get(
                "MILVUS_USER", config.get("milvus", "user", fallback=None)
            ),
            password=os.environ.get(
                "MILVUS_PASSWORD", config.get("milvus", "password", fallback=None)
            ),
            token=os.environ.get(
                "MILVUS_TOKEN", config.get("milvus", "token", fallback=None)
            ),
            db_name=os.environ.get(
                "MILVUS_DB_NAME", config.get("milvus", "db_name", fallback=None)
            ),
        )
        self._max_batch_size = self.global_config["embedding_batch_num"]
        MilvusVectorDBStorge.create_collection_if_not_exist(
            self._client,
            self.namespace,
            dimension=self.embedding_func.embedding_dim,
        )

    async def upsert(self, data: dict[str, dict]):
        """Embed the ``content`` of each record and upsert it into the collection.

        Args:
            data: mapping of id -> record; each record must contain
                ``"content"`` and may carry extra fields, of which only those
                in ``self.meta_fields`` are stored.

        Returns:
            The Milvus upsert result, or ``[]`` when ``data`` is empty.
        """
        # Check emptiness first so we never log "Inserting 0 vectors".
        if not data:
            logger.warning("You insert an empty data to vector DB")
            return []
        logger.info(f"Inserting {len(data)} vectors to {self.namespace}")

        list_data = [
            {
                "id": doc_id,
                **{
                    field: value
                    for field, value in doc.items()
                    if field in self.meta_fields
                },
            }
            for doc_id, doc in data.items()
        ]
        contents = [doc["content"] for doc in data.values()]
        batches = [
            contents[i : i + self._max_batch_size]
            for i in range(0, len(contents), self._max_batch_size)
        ]

        # The progress bar must exist before any embedding task can run;
        # define it ahead of the tasks instead of relying on gather() being
        # called after the (late-binding) closure is created.
        pbar = tqdm_async(
            total=len(batches), desc="Generating embeddings", unit="batch"
        )

        async def embed_batch(batch):
            result = await self.embedding_func(batch)
            pbar.update(1)
            return result

        embeddings_list = await asyncio.gather(
            *(embed_batch(batch) for batch in batches)
        )

        # Batches preserve input order, so row i of the concatenated
        # embeddings corresponds to the i-th record of list_data.
        embeddings = np.concatenate(embeddings_list)
        for record, vector in zip(list_data, embeddings):
            record["vector"] = vector
        return self._client.upsert(collection_name=self.namespace, data=list_data)

    async def query(self, query, top_k=5):
        """Embed ``query`` and return up to ``top_k`` nearest records.

        Each result dict contains the stored meta fields plus ``"id"`` and
        the cosine ``"distance"`` reported by Milvus.
        """
        embedding = await self.embedding_func([query])
        results = self._client.search(
            collection_name=self.namespace,
            data=embedding,
            limit=top_k,
            output_fields=list(self.meta_fields),
            search_params={"metric_type": "COSINE", "params": {"radius": 0.2}},
        )
        # Single query vector -> results[0] is its hit list.
        return [
            {**dp["entity"], "id": dp["id"], "distance": dp["distance"]}
            for dp in results[0]
        ]