import logging
import time

import click
from celery import shared_task

from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from models.dataset import (
    AppDatasetJoin,
    Dataset,
    DatasetProcessRule,
    DatasetQuery,
    Document,
    DocumentSegment,
)


@shared_task(queue='dataset')
def clean_dataset_task(dataset_id: str, tenant_id: str, indexing_technique: str,
                       index_struct: str, collection_binding_id: str, doc_form: str):
    """
    Clean a dataset and all of its related records when the dataset is deleted.
    :param dataset_id: dataset id
    :param tenant_id: tenant id
    :param indexing_technique: indexing technique
    :param index_struct: index struct (serialized dict)
    :param collection_binding_id: collection binding id
    :param doc_form: document form of the dataset

    Usage: clean_dataset_task.delay(dataset_id, tenant_id, indexing_technique,
                                    index_struct, collection_binding_id, doc_form)
    """
    logging.info(click.style('Start cleaning dataset when dataset deleted: {}'.format(dataset_id), fg='green'))
    start_at = time.perf_counter()

    try:
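        # The dataset row may already be gone by the time this task runs, so a
        # transient Dataset object is rebuilt from the passed-in fields; it only
        # carries the identifiers and index settings the cleanup below needs.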
        dataset = Dataset(
            id=dataset_id,
            tenant_id=tenant_id,
            indexing_technique=indexing_technique,
            index_struct=index_struct,
            collection_binding_id=collection_binding_id,
        )
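        # Load every document and segment that belongs to this dataset so the
        # corresponding rows can be deleted further down.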
        documents = db.session.query(Document).filter(Document.dataset_id == dataset_id).all()
        segments = db.session.query(DocumentSegment).filter(DocumentSegment.dataset_id == dataset_id).all()

        if not documents:
            logging.info(click.style('No documents found for dataset: {}'.format(dataset_id), fg='green'))
        else:
            logging.info(click.style('Cleaning documents for dataset: {}'.format(dataset_id), fg='green'))
            # doc_form decides which index processor performs the cleanup, so it must be provided
            if doc_form is None:
                raise ValueError("Index type must be specified.")
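            # Clean the dataset's data out of the index itself; passing None for
            # the node IDs asks the processor to clear the whole dataset rather
            # than a specific list of segments.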
            index_processor = IndexProcessorFactory(doc_form).init_index_processor()
            index_processor.clean(dataset, None)
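
            # Delete document and segment rows one by one through the ORM, so
            # ORM-level cleanup (e.g. relationship cascades) still runs; the
            # bulk query deletes below bypass such hooks.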
            for document in documents:
                db.session.delete(document)

            for segment in segments:
                db.session.delete(segment)
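
        # Remove the remaining dataset-scoped records in bulk: processing rules,
        # query history, and app-to-dataset joins.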
        db.session.query(DatasetProcessRule).filter(DatasetProcessRule.dataset_id == dataset_id).delete()
        db.session.query(DatasetQuery).filter(DatasetQuery.dataset_id == dataset_id).delete()
        db.session.query(AppDatasetJoin).filter(AppDatasetJoin.dataset_id == dataset_id).delete()

        db.session.commit()

        end_at = time.perf_counter()
        logging.info(
            click.style('Cleaned dataset when dataset deleted: {} latency: {}'.format(dataset_id, end_at - start_at),
                        fg='green'))
    except Exception:
        logging.exception("Failed to clean dataset when dataset deleted")
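

# A minimal invocation sketch (assumption: a Celery worker is consuming the
# 'dataset' queue; the argument values below are illustrative placeholders,
# not real IDs):
#
#   clean_dataset_task.delay(
#       'dataset-id', 'tenant-id', 'high_quality',
#       '<index struct JSON>', 'collection-binding-id', 'text_model',
#   )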