import logging
import time

import click
from celery import shared_task
from sqlalchemy import select

from core.indexing_runner import IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from libs.datetime_utils import naive_utc_now
from models.account import Account, Tenant
from models.dataset import Dataset, Document, DocumentSegment
from services.feature_service import FeatureService
from services.rag_pipeline.rag_pipeline import RagPipelineService

logger = logging.getLogger(__name__)

@shared_task(queue="dataset")
def retry_document_indexing_task(dataset_id: str, document_ids: list[str], user_id: str):
    """
    Retry indexing for documents in a dataset whose previous indexing run failed.

    :param dataset_id: ID of the dataset the documents belong to
    :param document_ids: IDs of the documents to re-index
    :param user_id: ID of the account that requested the retry

    Usage: retry_document_indexing_task.delay(dataset_id, document_ids, user_id)
    """
    start_at = time.perf_counter()
    try:
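        # Overall flow: resolve dataset/user/tenant, then for each document check
        # the subscription limit, wipe the old segments and vector entries, and
        # re-run indexing before clearing the per-document retry flag.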
        dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
        if not dataset:
            logger.info(click.style(f"Dataset not found: {dataset_id}", fg="red"))
            return

        user = db.session.query(Account).where(Account.id == user_id).first()
        if not user:
            logger.info(click.style(f"User not found: {user_id}", fg="red"))
            return

        tenant = db.session.query(Tenant).where(Tenant.id == dataset.tenant_id).first()
        if not tenant:
            raise ValueError("Tenant not found")
        user.current_tenant = tenant

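        # Each document carries a per-retry Redis flag ("document_{id}_is_retried"),
        # presumably set by the caller when the retry is requested (an assumption
        # based on the deletes below); the flag is cleared on both success and failure.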
        for document_id in document_ids:
            retry_indexing_cache_key = f"document_{document_id}_is_retried"

            # Check the workspace's vector-space quota before retrying; a limit
            # of 0 is treated as unlimited.
            features = FeatureService.get_features(tenant.id)
            try:
                if features.billing.enabled:
                    vector_space = features.vector_space
                    if 0 < vector_space.limit <= vector_space.size:
                        raise ValueError(
                            "Your total number of documents plus the number of uploads has exceeded the limit of "
                            "your subscription."
                        )
            except Exception as e:
                # Record the failure on the document and stop processing the batch.
                document = (
                    db.session.query(Document)
                    .where(Document.id == document_id, Document.dataset_id == dataset_id)
                    .first()
                )
                if document:
                    document.indexing_status = "error"
                    document.error = str(e)
                    document.stopped_at = naive_utc_now()
                    db.session.add(document)
                    db.session.commit()
                redis_client.delete(retry_indexing_cache_key)
                return

logger.info(click.style(f"Start retry document: {document_id}", fg="green"))
|
|
document = (
|
|
db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
|
|
)
|
|
if not document:
|
|
logger.info(click.style(f"Document not found: {document_id}", fg="yellow"))
|
|
return
|
|
            try:
                # Clean up leftovers from the failed run: drop the document's
                # segments from the vector index and then from the database.
                index_processor = IndexProcessorFactory(document.doc_form).init_index_processor()

                segments = db.session.scalars(
                    select(DocumentSegment).where(DocumentSegment.document_id == document_id)
                ).all()
                if segments:
                    index_node_ids = [segment.index_node_id for segment in segments]
                    # Delete from the vector index.
                    index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)

                    for segment in segments:
                        db.session.delete(segment)
                    db.session.commit()

                # Reset the document so indexing starts again from the parsing stage.
                document.indexing_status = "parsing"
                document.processing_started_at = naive_utc_now()
                db.session.add(document)
                db.session.commit()

                # Datasets built by a RAG pipeline are retried through the pipeline
                # service; all others go through the default indexing runner.
                if dataset.runtime_mode == "rag_pipeline":
                    rag_pipeline_service = RagPipelineService()
                    rag_pipeline_service.retry_error_document(dataset, document, user)
                else:
                    indexing_runner = IndexingRunner()
                    indexing_runner.run([document])
                redis_client.delete(retry_indexing_cache_key)
            except Exception as ex:
                # Persist the failure on the document and clear the retry flag so
                # the document can be retried again later.
                document.indexing_status = "error"
                document.error = str(ex)
                document.stopped_at = naive_utc_now()
                db.session.add(document)
                db.session.commit()
                logger.info(click.style(str(ex), fg="yellow"))
                redis_client.delete(retry_indexing_cache_key)
                logger.exception("retry_document_indexing_task failed, document_id: %s", document_id)

        end_at = time.perf_counter()
        logger.info(click.style(f"Retry dataset: {dataset_id} latency: {end_at - start_at}", fg="green"))
    except Exception as e:
        logger.exception(
            "retry_document_indexing_task failed, dataset_id: %s, document_ids: %s", dataset_id, document_ids
        )
        raise e
    finally:
        db.session.close()
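

# --- Usage sketch (illustrative; not part of the original module) ---
# The task is normally enqueued from application code via Celery. The module
# path and the IDs below are hypothetical placeholders.
#
#   from tasks.retry_document_indexing_task import retry_document_indexing_task
#
#   retry_document_indexing_task.delay(
#       "dataset-uuid",
#       ["document-uuid-1", "document-uuid-2"],
#       "user-uuid",
#   )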