import os
from typing import Any, final
from dataclasses import dataclass
import pipmaster as pm
import configparser
from contextlib import asynccontextmanager

if not pm.is_installed("redis"):
    pm.install("redis")

# aioredis is a deprecated library, replaced with redis
from redis.asyncio import Redis, ConnectionPool
from redis.exceptions import RedisError, ConnectionError
from lightrag.utils import logger, compute_mdhash_id
from lightrag.base import BaseKVStorage
import json

config = configparser.ConfigParser()
config.read("config.ini", "utf-8")
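
# The Redis connection URL is resolved in __post_init__ below: the REDIS_URI
# environment variable takes precedence, then the [redis] section of
# config.ini, then the redis://localhost:6379 fallback. An illustrative
# config.ini entry:
#
#   [redis]
#   uri = redis://localhost:6379/0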

# Constants for Redis connection pool
MAX_CONNECTIONS = 50
SOCKET_TIMEOUT = 5.0  # seconds
SOCKET_CONNECT_TIMEOUT = 3.0  # seconds


@final
@dataclass
class RedisKVStorage(BaseKVStorage):
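    """JSON key-value storage backed by Redis.

    Each entry lives under the key ``{namespace}:{id}`` with its value
    serialized as JSON. The instance owns a bounded connection pool and can
    be used as an async context manager so the pool is released on exit.
    """
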
    def __post_init__(self):
        redis_url = os.environ.get(
            "REDIS_URI", config.get("redis", "uri", fallback="redis://localhost:6379")
        )
        # Create a connection pool with limits
        self._pool = ConnectionPool.from_url(
            redis_url,
            max_connections=MAX_CONNECTIONS,
            decode_responses=True,
            socket_timeout=SOCKET_TIMEOUT,
            socket_connect_timeout=SOCKET_CONNECT_TIMEOUT,
        )
        self._redis = Redis(connection_pool=self._pool)
        logger.info(
            f"Initialized Redis connection pool for {self.namespace} "
            f"with max {MAX_CONNECTIONS} connections"
        )

    @asynccontextmanager
    async def _get_redis_connection(self):
        """Safe context manager for Redis operations."""
        try:
            yield self._redis
        except ConnectionError as e:
            logger.error(f"Redis connection error in {self.namespace}: {e}")
            raise
        except RedisError as e:
            logger.error(f"Redis operation error in {self.namespace}: {e}")
            raise
        except Exception as e:
            logger.error(
                f"Unexpected error in Redis operation for {self.namespace}: {e}"
            )
            raise

    async def close(self):
        """Close the Redis connection pool to prevent resource leaks."""
        if hasattr(self, "_redis") and self._redis:
            await self._redis.close()
            await self._pool.disconnect()
            logger.debug(f"Closed Redis connection pool for {self.namespace}")

    async def __aenter__(self):
        """Support for async context manager."""
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Ensure Redis resources are cleaned up when exiting context."""
        await self.close()

    async def get_by_id(self, id: str) -> dict[str, Any] | None:
        async with self._get_redis_connection() as redis:
            try:
                data = await redis.get(f"{self.namespace}:{id}")
                return json.loads(data) if data else None
            except json.JSONDecodeError as e:
                logger.error(f"JSON decode error for id {id}: {e}")
                return None

    async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
        async with self._get_redis_connection() as redis:
            try:
                pipe = redis.pipeline()
                for id in ids:
                    pipe.get(f"{self.namespace}:{id}")
                results = await pipe.execute()
                return [json.loads(result) if result else None for result in results]
            except json.JSONDecodeError as e:
                logger.error(f"JSON decode error in batch get: {e}")
                return [None] * len(ids)

    async def filter_keys(self, keys: set[str]) -> set[str]:
        async with self._get_redis_connection() as redis:
            pipe = redis.pipeline()
            # Materialize the set so positional pipeline results can be mapped
            # back to their keys (sets do not support indexing).
            keys_list = list(keys)
            for key in keys_list:
                pipe.exists(f"{self.namespace}:{key}")
            results = await pipe.execute()

            existing_ids = {keys_list[i] for i, exists in enumerate(results) if exists}
            return keys - existing_ids

    async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
        if not data:
            return

        logger.info(f"Inserting {len(data)} items to {self.namespace}")
        async with self._get_redis_connection() as redis:
            try:
                pipe = redis.pipeline()
                for k, v in data.items():
                    pipe.set(f"{self.namespace}:{k}", json.dumps(v))
                await pipe.execute()

                for k in data:
                    data[k]["_id"] = k
            except (TypeError, ValueError) as e:
                # json.dumps has no JSONEncodeError; it raises TypeError or
                # ValueError for unserializable values.
                logger.error(f"JSON encode error during upsert: {e}")
                raise

    async def delete(self, ids: list[str]) -> None:
        """Delete entries with specified IDs"""
        if not ids:
            return

        async with self._get_redis_connection() as redis:
            pipe = redis.pipeline()
            for id in ids:
                pipe.delete(f"{self.namespace}:{id}")

            results = await pipe.execute()
            deleted_count = sum(results)
            logger.info(
                f"Deleted {deleted_count} of {len(ids)} entries from {self.namespace}"
            )

    async def delete_entity(self, entity_name: str) -> None:
        """Delete an entity by name"""
        try:
            entity_id = compute_mdhash_id(entity_name, prefix="ent-")
            logger.debug(
                f"Attempting to delete entity {entity_name} with ID {entity_id}"
            )

            async with self._get_redis_connection() as redis:
                result = await redis.delete(f"{self.namespace}:{entity_id}")

                if result:
                    logger.debug(f"Successfully deleted entity {entity_name}")
                else:
                    logger.debug(f"Entity {entity_name} not found in storage")
        except Exception as e:
            logger.error(f"Error deleting entity {entity_name}: {e}")

    async def delete_entity_relation(self, entity_name: str) -> None:
        """Delete all relations associated with an entity"""
        try:
            async with self._get_redis_connection() as redis:
                cursor = 0
                relation_keys = []
                pattern = f"{self.namespace}:*"

                # SCAN walks the keyspace incrementally; a returned cursor of 0
                # means the iteration is complete.
                while True:
                    cursor, keys = await redis.scan(cursor, match=pattern)

                    # Process keys in batches
                    pipe = redis.pipeline()
                    for key in keys:
                        pipe.get(key)
                    values = await pipe.execute()

                    for key, value in zip(keys, values):
                        if value:
                            try:
                                data = json.loads(value)
                                if (
                                    data.get("src_id") == entity_name
                                    or data.get("tgt_id") == entity_name
                                ):
                                    relation_keys.append(key)
                            except json.JSONDecodeError:
                                logger.warning(f"Invalid JSON in key {key}")
                                continue

                    if cursor == 0:
                        break

                # Delete relations in batches
                if relation_keys:
                    # Delete in chunks to avoid passing too many arguments to DEL
                    chunk_size = 1000
                    for i in range(0, len(relation_keys), chunk_size):
                        chunk = relation_keys[i : i + chunk_size]
                        deleted = await redis.delete(*chunk)
                        logger.debug(
                            f"Deleted {deleted} relations for {entity_name} "
                            f"(batch {i // chunk_size + 1})"
                        )
                else:
                    logger.debug(f"No relations found for entity {entity_name}")
        except Exception as e:
            logger.error(f"Error deleting relations for {entity_name}: {e}")

    async def index_done_callback(self) -> None:
        # Redis handles persistence automatically
        pass
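

# ---------------------------------------------------------------------------
# Illustrative usage sketch (only runs when this module is executed directly,
# and requires a reachable Redis server). The constructor field names below
# are assumptions based on BaseKVStorage's dataclass fields; adjust them to
# match the actual base class before running.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import asyncio

    async def _demo() -> None:
        storage = RedisKVStorage(
            namespace="demo",  # assumed BaseKVStorage field
            global_config={},  # assumed BaseKVStorage field
            embedding_func=None,  # assumed BaseKVStorage field
        )
        async with storage:  # __aexit__ closes the connection pool
            await storage.upsert({"doc-1": {"content": "hello world"}})
            print(await storage.get_by_id("doc-1"))

    asyncio.run(_demo())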