from dataclasses import dataclass
import os
from typing import Any, Union, final
from lightrag.base import (
    DocProcessingStatus,
    DocStatus,
    DocStatusStorage,
)
from lightrag.utils import (
    load_json,
    logger,
    write_json,
)


@final
@dataclass
class JsonDocStatusStorage(DocStatusStorage):
    """JSON implementation of document status storage"""

    def __post_init__(self):
        working_dir = self.global_config["working_dir"]
        self._file_name = os.path.join(working_dir, f"kv_store_{self.namespace}.json")
        self._data: dict[str, Any] = load_json(self._file_name) or {}
        logger.info(f"Loaded document status storage with {len(self._data)} records")

    async def filter_keys(self, keys: set[str]) -> set[str]:
        """Return keys that should be processed (i.e. keys not yet present in storage)"""
        return set(keys) - set(self._data.keys())

    async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
        result: list[dict[str, Any]] = []
        for doc_id in ids:
            data = self._data.get(doc_id, None)
            if data:
                result.append(data)
        return result

    async def get_status_counts(self) -> dict[str, int]:
        """Get counts of documents in each status"""
        counts = {status.value: 0 for status in DocStatus}
        for doc in self._data.values():
            counts[doc["status"]] += 1
        return counts
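
    # Assuming DocStatus enumerates pending/processing/processed/failed, the
    # returned mapping looks like (illustrative):
    #   {"pending": 2, "processing": 0, "processed": 10, "failed": 1}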

    async def get_docs_by_status(
        self, status: DocStatus
    ) -> dict[str, DocProcessingStatus]:
        """Get all documents with a specific status"""
        result: dict[str, DocProcessingStatus] = {}
        for k, v in self._data.items():
            if v["status"] == status.value:
                try:
                    # Make a copy of the data to avoid modifying the original
                    data = v.copy()
                    # If content is missing, use content_summary as content
                    if "content" not in data and "content_summary" in data:
                        data["content"] = data["content_summary"]
                    result[k] = DocProcessingStatus(**data)
                except KeyError as e:
                    logger.error(f"Missing required field for document {k}: {e}")
                    continue
        return result

    async def index_done_callback(self) -> None:
        # Flush the in-memory dict to the JSON file
        write_json(self._data, self._file_name)

    async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
        logger.info(f"Inserting {len(data)} records to {self.namespace}")
        if not data:
            return
        self._data.update(data)
        await self.index_done_callback()

    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
        return self._data.get(id)

    async def delete(self, doc_ids: list[str]) -> None:
        for doc_id in doc_ids:
            self._data.pop(doc_id, None)
        await self.index_done_callback()

    async def drop(self) -> None:
        """Drop the storage"""
        self._data.clear()
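

# Minimal usage sketch (illustrative only, guarded so it never runs on import).
# It assumes the dataclass fields inherited from DocStatusStorage are just
# `namespace` and `global_config`; the real base class may declare more (for
# example an embedding function), in which case the constructor call below
# needs those fields as well.
if __name__ == "__main__":
    import asyncio

    async def _demo() -> None:
        storage = JsonDocStatusStorage(
            namespace="doc_status",
            global_config={"working_dir": "."},
        )
        # Record one document as pending, then print per-status counts
        await storage.upsert(
            {
                "doc-1": {
                    "status": DocStatus.PENDING.value,
                    "content_summary": "example document",
                }
            }
        )
        print(await storage.get_status_counts())

    asyncio.run(_demo())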