import logging
import time

import click
from celery import shared_task

from core.db.session_factory import session_factory
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_redis import redis_client
from models.dataset import DocumentSegment

logger = logging.getLogger(__name__)
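

# Runs asynchronously on the "dataset" Celery queue.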
@shared_task(queue="dataset")
def disable_segment_from_index_task(segment_id: str):
    """
    Async disable segment from index
    :param segment_id: ID of the segment to remove from the index

    Usage: disable_segment_from_index_task.delay(segment_id)
    """
    logger.info(click.style(f"Start disable segment from index: {segment_id}", fg="green"))
    start_at = time.perf_counter()

    with session_factory.create_session() as session:
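        # Load the segment; nothing to do if it no longer exists.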
        segment = session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first()
        if not segment:
            logger.info(click.style(f"Segment not found: {segment_id}", fg="red"))
            return
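
        # Only segments that finished indexing can be disabled.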
        if segment.status != "completed":
            logger.info(click.style(f"Segment is not completed, disable is not allowed: {segment_id}", fg="red"))
            return
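
        # Cache key that marks this segment as having an indexing operation in
        # progress; it is cleared in the finally block once this task finishes.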
        indexing_cache_key = f"segment_{segment.id}_indexing"

        try:
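            # Nothing to clean up if the owning dataset or document is missing
            # or the document is not in a servable state.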
            dataset = segment.dataset
            if not dataset:
                logger.info(click.style(f"Segment {segment.id} has no dataset, pass.", fg="cyan"))
                return

            dataset_document = segment.document
            if not dataset_document:
                logger.info(click.style(f"Segment {segment.id} has no document, pass.", fg="cyan"))
                return

            if (
                not dataset_document.enabled
                or dataset_document.archived
                or dataset_document.indexing_status != "completed"
            ):
                logger.info(click.style(f"Segment {segment.id} document status is invalid, pass.", fg="cyan"))
                return
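
            # Remove the segment's node from the index built for this doc form.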
            index_type = dataset_document.doc_form
            index_processor = IndexProcessorFactory(index_type).init_index_processor()
            index_processor.clean(dataset, [segment.index_node_id])

            # Disable summary index for this segment
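            # (the service is imported here rather than at module top level,
            # presumably to avoid an import cycle)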
            from services.summary_index_service import SummaryIndexService
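
            # Summary cleanup is best-effort: failures are logged but do not
            # re-enable the segment or abort the task.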
            try:
                SummaryIndexService.disable_summaries_for_segments(
                    dataset=dataset,
                    segment_ids=[segment.id],
                    disabled_by=segment.disabled_by,
                )
            except Exception as e:
                logger.warning("Failed to disable summary for segment %s: %s", segment.id, str(e))

            end_at = time.perf_counter()
            logger.info(
                click.style(
                    f"Segment removed from index: {segment.id} latency: {end_at - start_at}",
                    fg="green",
                )
            )
        except Exception:
            logger.exception("remove segment from index failed")
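            # Index cleanup failed: flip the segment back to enabled so it is
            # not shown as disabled while its vectors are still in the index.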
            segment.enabled = True
            session.commit()
        finally:
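            # Always release the indexing marker, whether cleanup succeeded or failed.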
            redis_client.delete(indexing_cache_key)