LightRAG/lightrag/kg/json_doc_status_impl.py

92 lines
2.9 KiB
Python
Raw Normal View History

2025-01-27 09:08:14 +01:00
from dataclasses import dataclass
2025-02-09 21:24:13 +01:00
import os
from typing import Any, Union, final
2025-01-27 09:08:14 +01:00
from lightrag.base import (
DocProcessingStatus,
2025-02-09 19:21:49 +01:00
DocStatus,
2025-01-27 09:08:14 +01:00
DocStatusStorage,
)
2025-02-09 19:21:49 +01:00
from lightrag.utils import (
load_json,
logger,
write_json,
)
2025-01-27 09:08:14 +01:00
@final
@dataclass
class JsonDocStatusStorage(DocStatusStorage):
    """JSON-file-backed implementation of document status storage.

    Persists a mapping of ``doc_id -> status record`` as a single JSON file
    (``kv_store_<namespace>.json``) under the configured working directory.
    Every mutation is flushed to disk immediately via
    :meth:`index_done_callback`.
    """

    def __post_init__(self):
        # Resolve the backing file from the shared config and eagerly load it;
        # a missing/empty file yields an empty store.
        working_dir = self.global_config["working_dir"]
        self._file_name = os.path.join(working_dir, f"kv_store_{self.namespace}.json")
        self._data: dict[str, Any] = load_json(self._file_name) or {}
        logger.info(f"Loaded document status storage with {len(self._data)} records")

    async def filter_keys(self, keys: set[str]) -> set[str]:
        """Return the subset of *keys* that are not yet present in storage.

        NOTE(review): only presence is checked here — a key with a FAILED
        status record is NOT returned for reprocessing.
        """
        return set(keys) - set(self._data.keys())

    async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
        """Return the stored records for *ids*, silently skipping unknown ids."""
        result: list[dict[str, Any]] = []
        for doc_id in ids:  # renamed from `id` to avoid shadowing the builtin
            data = self._data.get(doc_id)
            # `is not None` rather than truthiness so an empty (falsy) record
            # is still returned rather than silently dropped.
            if data is not None:
                result.append(data)
        return result

    async def get_status_counts(self) -> dict[str, int]:
        """Get counts of documents in each status."""
        counts = {status.value: 0 for status in DocStatus}
        for doc in self._data.values():
            counts[doc["status"]] += 1
        return counts

    def _docs_with_status(self, status: DocStatus) -> dict[str, DocProcessingStatus]:
        """Return every stored document whose ``status`` field equals *status*."""
        return {
            k: DocProcessingStatus(**v)
            for k, v in self._data.items()
            if v["status"] == status
        }

    async def get_failed_docs(self) -> dict[str, DocProcessingStatus]:
        """Documents whose status is FAILED."""
        return self._docs_with_status(DocStatus.FAILED)

    async def get_pending_docs(self) -> dict[str, DocProcessingStatus]:
        """Documents whose status is PENDING."""
        return self._docs_with_status(DocStatus.PENDING)

    async def get_processed_docs(self) -> dict[str, DocProcessingStatus]:
        """Documents whose status is PROCESSED."""
        return self._docs_with_status(DocStatus.PROCESSED)

    async def get_processing_docs(self) -> dict[str, DocProcessingStatus]:
        """Documents whose status is PROCESSING."""
        return self._docs_with_status(DocStatus.PROCESSING)

    async def index_done_callback(self) -> None:
        """Flush the in-memory store to the backing JSON file."""
        write_json(self._data, self._file_name)

    async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
        """Insert or overwrite status records, then persist immediately."""
        self._data.update(data)
        await self.index_done_callback()

    async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
        """Return the record for *id*, or ``None`` when unknown."""
        return self._data.get(id)

    async def delete(self, doc_ids: list[str]) -> None:
        """Remove the given document ids (missing ids are ignored), then persist."""
        for doc_id in doc_ids:
            self._data.pop(doc_id, None)
        await self.index_done_callback()

    async def drop(self) -> None:
        """Dropping the whole store is not supported by this backend."""
        raise NotImplementedError