import configparser
import json
import os
from dataclasses import dataclass
from typing import Any, final

import pipmaster as pm

# Ensure the redis client library is available before importing it
if not pm.is_installed("redis"):
    pm.install("redis")

# aioredis is a deprecated library; it has been replaced by redis
from redis.asyncio import Redis  # type: ignore

from lightrag.base import BaseKVStorage
from lightrag.utils import logger

config = configparser.ConfigParser()
config.read("config.ini", encoding="utf-8")


@final
@dataclass
class RedisKVStorage(BaseKVStorage):
    def __post_init__(self):
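        # Resolve the connection string: the REDIS_URI environment variable
        # takes precedence, then the [redis] section of config.ini, then a
        # localhost default.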
        redis_url = os.environ.get(
            "REDIS_URI", config.get("redis", "uri", fallback="redis://localhost:6379")
        )
        self._redis = Redis.from_url(redis_url, decode_responses=True)
        logger.info(f"Using Redis as KV storage for {self.namespace}")

    async def get_by_id(self, id: str) -> dict[str, Any] | None:
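        # Keys are namespaced as "<namespace>:<id>"; values are stored as JSON strings.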
        data = await self._redis.get(f"{self.namespace}:{id}")
        return json.loads(data) if data else None

    async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
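        # Batch all GETs into a single pipeline so only one network round trip
        # is needed regardless of how many ids are requested.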
        pipe = self._redis.pipeline()
        for id in ids:
            pipe.get(f"{self.namespace}:{id}")
        results = await pipe.execute()
        return [json.loads(result) if result else None for result in results]

    async def filter_keys(self, keys: set[str]) -> set[str]:
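        # Return the subset of keys that are NOT yet stored in this namespace.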
        keys_list = list(keys)  # sets are unordered and not indexable, so materialize a list
        pipe = self._redis.pipeline()
        for key in keys_list:
            pipe.exists(f"{self.namespace}:{key}")
        results = await pipe.execute()

        existing_ids = {keys_list[i] for i, exists in enumerate(results) if exists}
        return keys - existing_ids

    async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
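        # Values are JSON-serialized and written through a pipeline in one
        # round trip; afterwards each input dict is tagged with its own key.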
        logger.info(f"Inserting {len(data)} records into {self.namespace}")
        if not data:
            return

        pipe = self._redis.pipeline()
        for k, v in data.items():
            pipe.set(f"{self.namespace}:{k}", json.dumps(v))
        await pipe.execute()

        for k in data:
            data[k]["_id"] = k

    async def index_done_callback(self) -> None:
        # Redis handles persistence automatically
        pass

    async def delete(self, ids: list[str]) -> None:
        """Delete entries with the specified IDs.

        Args:
            ids: List of entry IDs to be deleted
        """
        if not ids:
            return

        pipe = self._redis.pipeline()
        for id in ids:
            pipe.delete(f"{self.namespace}:{id}")

        results = await pipe.execute()
        deleted_count = sum(results)
        logger.info(
            f"Deleted {deleted_count} of {len(ids)} entries from {self.namespace}"
        )

    async def drop_cache_by_modes(self, modes: list[str] | None = None) -> bool:
        """Delete specific records from storage by cache mode.

        Important notes for Redis storage:
        1. This will immediately delete the specified cache modes from Redis

        Args:
            modes (list[str]): List of cache modes to be dropped from storage

        Returns:
            True: if the cache was dropped successfully
            False: if the cache drop failed
        """
        if not modes:
            return False

        try:
            await self.delete(modes)
            return True
        except Exception:
            return False

    async def drop(self) -> dict[str, str]:
        """Drop the storage by removing all keys under the current namespace.

        Returns:
            dict[str, str]: Status of the operation with keys 'status' and 'message'
        """
        try:
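            # Note: KEYS scans the entire keyspace in O(N) time and can block
            # the server; a SCAN-based iteration would be gentler at scale.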
            keys = await self._redis.keys(f"{self.namespace}:*")

            if keys:
                pipe = self._redis.pipeline()
                for key in keys:
                    pipe.delete(key)
                results = await pipe.execute()
                deleted_count = sum(results)

                logger.info(f"Dropped {deleted_count} keys from {self.namespace}")
                return {"status": "success", "message": f"{deleted_count} keys dropped"}
            else:
                logger.info(f"No keys found to drop in {self.namespace}")
                return {"status": "success", "message": "no keys to drop"}

        except Exception as e:
            logger.error(f"Error dropping keys from {self.namespace}: {e}")
            return {"status": "error", "message": str(e)}