Mirror of https://github.com/HKUDS/LightRAG.git (synced 2025-08-06 15:51:48 +00:00)

Fix linting

commit 86c9a0cda2
parent 271722405f
@@ -615,11 +615,13 @@ class PGKVStorage(BaseKVStorage):
             # Map field names and add cache_type for compatibility
             processed_row = {
                 **row,
-                "return": row.get("return_value", ""),  # Map return_value to return
+                "return": row.get(
+                    "return_value", ""
+                ),  # Map return_value to return
                 "cache_type": cache_type,  # Add cache_type from key
                 "original_prompt": row.get("original_prompt", ""),
                 "chunk_id": row.get("chunk_id"),
-                "mode": row.get("mode", "default")
+                "mode": row.get("mode", "default"),
             }
             processed_results[row["id"]] = processed_row
         return processed_results
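The hunk above only re-wraps long lines; the underlying pattern is remapping storage column names onto the keys callers expect. A minimal standalone sketch of that mapping, assuming `rows` comes from the database driver as a list of dicts and `cache_type` was parsed from the cache key (both are assumptions, since they sit outside the hunk):

```python
from typing import Any


def map_rows(
    rows: list[dict[str, Any]], cache_type: str
) -> dict[str, dict[str, Any]]:
    """Rename storage columns to caller-facing keys, indexed by row id."""
    processed_results: dict[str, dict[str, Any]] = {}
    for row in rows:
        processed_results[row["id"]] = {
            **row,
            # "return" is a Python keyword, so the column is stored as
            # "return_value" and mapped back here for compatibility.
            "return": row.get("return_value", ""),
            "cache_type": cache_type,  # derived from the key, not the row
            "original_prompt": row.get("original_prompt", ""),
            "chunk_id": row.get("chunk_id"),
            "mode": row.get("mode", "default"),
        }
    return processed_results
```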
@@ -14,7 +14,12 @@ from redis.asyncio import Redis, ConnectionPool  # type: ignore
 from redis.exceptions import RedisError, ConnectionError  # type: ignore
 from lightrag.utils import logger
 
-from lightrag.base import BaseKVStorage, DocStatusStorage, DocStatus, DocProcessingStatus
+from lightrag.base import (
+    BaseKVStorage,
+    DocStatusStorage,
+    DocStatus,
+    DocProcessingStatus,
+)
 import json
 
 
@@ -256,7 +261,9 @@ class RedisKVStorage(BaseKVStorage):
                 mode_keys = []
 
                 while True:
-                    cursor, keys = await redis.scan(cursor, match=pattern, count=1000)
+                    cursor, keys = await redis.scan(
+                        cursor, match=pattern, count=1000
+                    )
                     if keys:
                         mode_keys.extend(keys)
||||
@@ -264,7 +271,9 @@ class RedisKVStorage(BaseKVStorage):
                         break
 
                 keys_to_delete.extend(mode_keys)
-                logger.info(f"Found {len(mode_keys)} keys for mode '{mode}' with pattern '{pattern}'")
+                logger.info(
+                    f"Found {len(mode_keys)} keys for mode '{mode}' with pattern '{pattern}'"
+                )
 
             if keys_to_delete:
                 # Batch delete
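The loop re-wrapped in these hunks is the standard non-blocking way to enumerate Redis keys: SCAN returns a new cursor plus a batch of matches, and iteration ends when the cursor comes back as 0. A self-contained sketch of the pattern with redis.asyncio; the host, port, and pattern are placeholders:

```python
import asyncio

from redis.asyncio import Redis


async def scan_keys(redis: Redis, pattern: str) -> list[bytes]:
    """Collect every key matching a pattern without blocking the server."""
    keys: list[bytes] = []
    cursor = 0
    while True:
        # count=1000 is a hint for how much work each round trip does,
        # not a guarantee of batch size.
        cursor, batch = await redis.scan(cursor, match=pattern, count=1000)
        keys.extend(batch)
        if cursor == 0:  # a zero cursor means the scan is complete
            return keys


async def main() -> None:
    redis = Redis(host="localhost", port=6379)  # placeholder connection
    print(await scan_keys(redis, "mynamespace:*"))  # placeholder pattern
    await redis.aclose()  # redis-py 5.x; older releases use close()


asyncio.run(main())
```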
@@ -419,7 +428,9 @@ class RedisDocStatusStorage(DocStatusStorage):
         try:
             async with self._get_redis_connection() as redis:
                 await redis.ping()
-                logger.info(f"Connected to Redis for doc status namespace {self.namespace}")
+                logger.info(
+                    f"Connected to Redis for doc status namespace {self.namespace}"
+                )
         except Exception as e:
             logger.error(f"Failed to connect to Redis for doc status: {e}")
             raise
@@ -495,7 +506,9 @@ class RedisDocStatusStorage(DocStatusStorage):
                 # Use SCAN to iterate through all keys in the namespace
                 cursor = 0
                 while True:
-                    cursor, keys = await redis.scan(cursor, match=f"{self.namespace}:*", count=1000)
+                    cursor, keys = await redis.scan(
+                        cursor, match=f"{self.namespace}:*", count=1000
+                    )
                     if keys:
                         # Get all values in batch
                         pipe = redis.pipeline()
@@ -531,7 +544,9 @@ class RedisDocStatusStorage(DocStatusStorage):
                 # Use SCAN to iterate through all keys in the namespace
                 cursor = 0
                 while True:
-                    cursor, keys = await redis.scan(cursor, match=f"{self.namespace}:*", count=1000)
+                    cursor, keys = await redis.scan(
+                        cursor, match=f"{self.namespace}:*", count=1000
+                    )
                     if keys:
                         # Get all values in batch
                         pipe = redis.pipeline()
@@ -551,7 +566,10 @@ class RedisDocStatusStorage(DocStatusStorage):
                                 # Make a copy of the data to avoid modifying the original
                                 data = doc_data.copy()
                                 # If content is missing, use content_summary as content
-                                if "content" not in data and "content_summary" in data:
+                                if (
+                                    "content" not in data
+                                    and "content_summary" in data
+                                ):
                                     data["content"] = data["content_summary"]
                                 # If file_path is not in data, use document id as file path
                                 if "file_path" not in data:
@@ -559,7 +577,9 @@ class RedisDocStatusStorage(DocStatusStorage):
 
                                 result[doc_id] = DocProcessingStatus(**data)
                             except (json.JSONDecodeError, KeyError) as e:
-                                logger.error(f"Error processing document {key}: {e}")
+                                logger.error(
+                                    f"Error processing document {key}: {e}"
+                                )
                                 continue
 
             if cursor == 0:
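Read together, the last three hunks implement one flow: scan the namespace, batch-fetch values through a pipeline, then rebuild status objects with fallbacks for missing fields. A condensed sketch of that flow; the dataclass is a stand-in for lightrag.base.DocProcessingStatus, and the fallback rules mirror the diff:

```python
import json
from dataclasses import dataclass

from redis.asyncio import Redis


@dataclass
class DocProcessingStatus:  # stand-in for lightrag.base.DocProcessingStatus
    content: str
    content_summary: str = ""
    file_path: str = ""
    status: str = "pending"


async def load_statuses(
    redis: Redis, namespace: str
) -> dict[str, DocProcessingStatus]:
    result: dict[str, DocProcessingStatus] = {}
    cursor = 0
    while True:
        cursor, keys = await redis.scan(cursor, match=f"{namespace}:*", count=1000)
        if keys:
            pipe = redis.pipeline()
            for key in keys:
                pipe.get(key)  # queue one GET per key, one round trip total
            values = await pipe.execute()
            for key, value in zip(keys, values):
                if value is None:
                    continue
                try:
                    data = json.loads(value)
                    doc_id = key.decode().split(":", 1)[1]
                    # Fallbacks as in the diff: the summary stands in for
                    # missing content, the doc id for a missing file path.
                    if "content" not in data and "content_summary" in data:
                        data["content"] = data["content_summary"]
                    if "file_path" not in data:
                        data["file_path"] = doc_id
                    result[doc_id] = DocProcessingStatus(**data)
                except (json.JSONDecodeError, KeyError, TypeError) as e:
                    print(f"Error processing document {key}: {e}")
        if cursor == 0:
            return result
```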
@@ -610,7 +630,9 @@ class RedisDocStatusStorage(DocStatusStorage):
 
                 results = await pipe.execute()
                 deleted_count = sum(results)
-                logger.info(f"Deleted {deleted_count} of {len(doc_ids)} doc status entries from {self.namespace}")
+                logger.info(
+                    f"Deleted {deleted_count} of {len(doc_ids)} doc status entries from {self.namespace}"
+                )
 
     async def drop(self) -> dict[str, str]:
         """Drop all document status data from storage and clean up resources"""
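The `deleted_count = sum(results)` line above works because each DEL queued on a pipeline resolves to 1 if the key existed and 0 otherwise. A minimal sketch of that batch delete; the `{namespace}:{doc_id}` key layout matches the scan pattern used elsewhere in the diff:

```python
from redis.asyncio import Redis


async def delete_doc_statuses(
    redis: Redis, namespace: str, doc_ids: list[str]
) -> int:
    """Delete doc status keys in one round trip, reporting how many existed."""
    pipe = redis.pipeline()
    for doc_id in doc_ids:
        pipe.delete(f"{namespace}:{doc_id}")  # DEL returns 1 only if present
    results = await pipe.execute()
    return sum(results)  # number of keys actually removed
```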
@@ -634,7 +656,9 @@ class RedisDocStatusStorage(DocStatusStorage):
                     if cursor == 0:
                         break
 
-                logger.info(f"Dropped {deleted_count} doc status keys from {self.namespace}")
+                logger.info(
+                    f"Dropped {deleted_count} doc status keys from {self.namespace}"
+                )
                 return {"status": "success", "message": "data dropped"}
         except Exception as e:
             logger.error(f"Error dropping doc status {self.namespace}: {e}")