import uuid
from datetime import datetime, timezone

import pandas as pd
from flask import request
from flask_login import current_user
from flask_restful import Resource, marshal, reqparse
from werkzeug.exceptions import Forbidden, NotFound

import services
from controllers.console import api
from controllers.console.app.error import ProviderNotInitializeError
from controllers.console.datasets.error import InvalidActionError, NoFileUploadedError, TooManyFilesError
from controllers.console.setup import setup_required
from controllers.console.wraps import (
    account_initialization_required,
    cloud_edition_billing_knowledge_limit_check,
    cloud_edition_billing_resource_check,
)
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
from core.model_manager import ModelManager
from core.model_runtime.entities.model_entities import ModelType
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from fields.segment_fields import segment_fields
from libs.login import login_required
from models.dataset import DocumentSegment
from services.dataset_service import DatasetService, DocumentService, SegmentService
from tasks.batch_create_segment_to_index_task import batch_create_segment_to_index_task
from tasks.disable_segment_from_index_task import disable_segment_from_index_task
from tasks.enable_segment_to_index_task import enable_segment_to_index_task


class DatasetDocumentSegmentListApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    def get(self, dataset_id, document_id):
        dataset_id = str(dataset_id)
        document_id = str(document_id)
        dataset = DatasetService.get_dataset(dataset_id)
        if not dataset:
            raise NotFound('Dataset not found.')

        try:
            DatasetService.check_dataset_permission(dataset, current_user)
        except services.errors.account.NoPermissionError as e:
            raise Forbidden(str(e))

        document = DocumentService.get_document(dataset_id, document_id)

        if not document:
            raise NotFound('Document not found.')

        parser = reqparse.RequestParser()
        parser.add_argument('last_id', type=str, default=None, location='args')
        parser.add_argument('limit', type=int, default=20, location='args')
        parser.add_argument('status', type=str,
                            action='append', default=[], location='args')
        parser.add_argument('hit_count_gte', type=int,
                            default=None, location='args')
        parser.add_argument('enabled', type=str, default='all', location='args')
        parser.add_argument('keyword', type=str, default=None, location='args')
        args = parser.parse_args()

        last_id = args['last_id']
        limit = min(args['limit'], 100)
        status_list = args['status']
        hit_count_gte = args['hit_count_gte']
        keyword = args['keyword']

        query = DocumentSegment.query.filter(
            DocumentSegment.document_id == str(document_id),
            DocumentSegment.tenant_id == current_user.current_tenant_id
        )

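        # Keyset pagination: results resume after the position of the segment
        # identified by 'last_id' rather than using an offset.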
        if last_id is not None:
            last_segment = DocumentSegment.query.get(str(last_id))
            if last_segment:
                query = query.filter(
                    DocumentSegment.position > last_segment.position)
            else:
                return {'data': [], 'has_more': False, 'limit': limit}, 200

        if status_list:
            query = query.filter(DocumentSegment.status.in_(status_list))

        if hit_count_gte is not None:
            query = query.filter(DocumentSegment.hit_count >= hit_count_gte)

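        # Optional case-insensitive substring match against segment content.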
        if keyword:
            query = query.where(DocumentSegment.content.ilike(f'%{keyword}%'))

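        # 'enabled' accepts 'true' or 'false'; any other value (default 'all') skips this filter.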
        if args['enabled'].lower() != 'all':
            if args['enabled'].lower() == 'true':
                query = query.filter(DocumentSegment.enabled == True)
            elif args['enabled'].lower() == 'false':
                query = query.filter(DocumentSegment.enabled == False)

        total = query.count()
        segments = query.order_by(DocumentSegment.position).limit(limit + 1).all()

        has_more = False
        if len(segments) > limit:
            has_more = True
            segments = segments[:-1]

        return {
            'data': marshal(segments, segment_fields),
            'doc_form': document.doc_form,
            'has_more': has_more,
            'limit': limit,
            'total': total
        }, 200


class DatasetDocumentSegmentApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @cloud_edition_billing_resource_check('vector_space')
    def patch(self, dataset_id, segment_id, action):
        dataset_id = str(dataset_id)
        dataset = DatasetService.get_dataset(dataset_id)
        if not dataset:
            raise NotFound('Dataset not found.')
        # check user's model setting
        DatasetService.check_dataset_model_setting(dataset)
        # The role of the current user in the ta table must be admin or owner
        if not current_user.is_admin_or_owner:
            raise Forbidden()

        try:
            DatasetService.check_dataset_permission(dataset, current_user)
        except services.errors.account.NoPermissionError as e:
            raise Forbidden(str(e))
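        # High-quality datasets index segments with embeddings, so a usable
        # embedding model must be configured before the segment can be re-indexed.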
        if dataset.indexing_technique == 'high_quality':
            # check embedding model setting
            try:
                model_manager = ModelManager()
                model_manager.get_model_instance(
                    tenant_id=current_user.current_tenant_id,
                    provider=dataset.embedding_model_provider,
                    model_type=ModelType.TEXT_EMBEDDING,
                    model=dataset.embedding_model
                )
            except LLMBadRequestError:
                raise ProviderNotInitializeError(
                    "No Embedding Model available. Please configure a valid provider "
                    "in the Settings -> Model Provider.")
            except ProviderTokenNotInitError as ex:
                raise ProviderNotInitializeError(ex.description)

        segment = DocumentSegment.query.filter(
            DocumentSegment.id == str(segment_id),
            DocumentSegment.tenant_id == current_user.current_tenant_id
        ).first()

        if not segment:
            raise NotFound('Segment not found.')

        if segment.status != 'completed':
            raise NotFound('Segment is not completed; enabling or disabling it is not allowed.')

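        # Refuse to toggle the segment while its document or the segment itself
        # is still being indexed (tracked by short-lived Redis keys).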
        document_indexing_cache_key = 'document_{}_indexing'.format(segment.document_id)
        cache_result = redis_client.get(document_indexing_cache_key)
        if cache_result is not None:
            raise InvalidActionError("Document is being indexed, please try again later")

        indexing_cache_key = 'segment_{}_indexing'.format(segment.id)
        cache_result = redis_client.get(indexing_cache_key)
        if cache_result is not None:
            raise InvalidActionError("Segment is being indexed, please try again later")

        if action == "enable":
            if segment.enabled:
                raise InvalidActionError("Segment is already enabled.")

            segment.enabled = True
            segment.disabled_at = None
            segment.disabled_by = None
            db.session.commit()

            # Set cache to prevent indexing the same segment multiple times
            redis_client.setex(indexing_cache_key, 600, 1)

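            # Rebuild the segment's index entry asynchronously via the Celery task.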
            enable_segment_to_index_task.delay(segment.id)

            return {'result': 'success'}, 200
        elif action == "disable":
            if not segment.enabled:
                raise InvalidActionError("Segment is already disabled.")

            segment.enabled = False
            segment.disabled_at = datetime.now(timezone.utc).replace(tzinfo=None)
            segment.disabled_by = current_user.id
            db.session.commit()

            # Set cache to prevent indexing the same segment multiple times
            redis_client.setex(indexing_cache_key, 600, 1)

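            # Remove the segment from the index asynchronously via the Celery task.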
            disable_segment_from_index_task.delay(segment.id)

            return {'result': 'success'}, 200
        else:
            raise InvalidActionError()


class DatasetDocumentSegmentAddApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @cloud_edition_billing_resource_check('vector_space')
    @cloud_edition_billing_knowledge_limit_check('add_segment')
    def post(self, dataset_id, document_id):
        # check dataset
        dataset_id = str(dataset_id)
        dataset = DatasetService.get_dataset(dataset_id)
        if not dataset:
            raise NotFound('Dataset not found.')
        # check document
        document_id = str(document_id)
        document = DocumentService.get_document(dataset_id, document_id)
        if not document:
            raise NotFound('Document not found.')
        # The role of the current user in the ta table must be admin or owner
        if not current_user.is_admin_or_owner:
            raise Forbidden()
        # check embedding model setting
        if dataset.indexing_technique == 'high_quality':
            try:
                model_manager = ModelManager()
                model_manager.get_model_instance(
                    tenant_id=current_user.current_tenant_id,
                    provider=dataset.embedding_model_provider,
                    model_type=ModelType.TEXT_EMBEDDING,
                    model=dataset.embedding_model
                )
            except LLMBadRequestError:
                raise ProviderNotInitializeError(
                    "No Embedding Model available. Please configure a valid provider "
                    "in the Settings -> Model Provider.")
            except ProviderTokenNotInitError as ex:
                raise ProviderNotInitializeError(ex.description)
        try:
            DatasetService.check_dataset_permission(dataset, current_user)
        except services.errors.account.NoPermissionError as e:
            raise Forbidden(str(e))
        # validate args
        parser = reqparse.RequestParser()
        parser.add_argument('content', type=str, required=True, nullable=False, location='json')
        parser.add_argument('answer', type=str, required=False, nullable=True, location='json')
        parser.add_argument('keywords', type=list, required=False, nullable=True, location='json')
        args = parser.parse_args()
        SegmentService.segment_create_args_validate(args, document)
        segment = SegmentService.create_segment(args, document, dataset)
        return {
            'data': marshal(segment, segment_fields),
            'doc_form': document.doc_form
        }, 200


class DatasetDocumentSegmentUpdateApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @cloud_edition_billing_resource_check('vector_space')
    def patch(self, dataset_id, document_id, segment_id):
        # check dataset
        dataset_id = str(dataset_id)
        dataset = DatasetService.get_dataset(dataset_id)
        if not dataset:
            raise NotFound('Dataset not found.')
        # check user's model setting
        DatasetService.check_dataset_model_setting(dataset)
        # check document
        document_id = str(document_id)
        document = DocumentService.get_document(dataset_id, document_id)
        if not document:
            raise NotFound('Document not found.')
        if dataset.indexing_technique == 'high_quality':
            # check embedding model setting
            try:
                model_manager = ModelManager()
                model_manager.get_model_instance(
                    tenant_id=current_user.current_tenant_id,
                    provider=dataset.embedding_model_provider,
                    model_type=ModelType.TEXT_EMBEDDING,
                    model=dataset.embedding_model
                )
            except LLMBadRequestError:
                raise ProviderNotInitializeError(
                    "No Embedding Model available. Please configure a valid provider "
                    "in the Settings -> Model Provider.")
            except ProviderTokenNotInitError as ex:
                raise ProviderNotInitializeError(ex.description)
        # check segment
        segment_id = str(segment_id)
        segment = DocumentSegment.query.filter(
            DocumentSegment.id == str(segment_id),
            DocumentSegment.tenant_id == current_user.current_tenant_id
        ).first()
        if not segment:
            raise NotFound('Segment not found.')
        # The role of the current user in the ta table must be admin or owner
        if not current_user.is_admin_or_owner:
            raise Forbidden()
        try:
            DatasetService.check_dataset_permission(dataset, current_user)
        except services.errors.account.NoPermissionError as e:
            raise Forbidden(str(e))
        # validate args
        parser = reqparse.RequestParser()
        parser.add_argument('content', type=str, required=True, nullable=False, location='json')
        parser.add_argument('answer', type=str, required=False, nullable=True, location='json')
        parser.add_argument('keywords', type=list, required=False, nullable=True, location='json')
        args = parser.parse_args()
        SegmentService.segment_create_args_validate(args, document)
        segment = SegmentService.update_segment(args, segment, document, dataset)
        return {
            'data': marshal(segment, segment_fields),
            'doc_form': document.doc_form
        }, 200

    @setup_required
    @login_required
    @account_initialization_required
    def delete(self, dataset_id, document_id, segment_id):
        # check dataset
        dataset_id = str(dataset_id)
        dataset = DatasetService.get_dataset(dataset_id)
        if not dataset:
            raise NotFound('Dataset not found.')
        # check user's model setting
        DatasetService.check_dataset_model_setting(dataset)
        # check document
        document_id = str(document_id)
        document = DocumentService.get_document(dataset_id, document_id)
        if not document:
            raise NotFound('Document not found.')
        # check segment
        segment_id = str(segment_id)
        segment = DocumentSegment.query.filter(
            DocumentSegment.id == str(segment_id),
            DocumentSegment.tenant_id == current_user.current_tenant_id
        ).first()
        if not segment:
            raise NotFound('Segment not found.')
        # The role of the current user in the ta table must be admin or owner
        if not current_user.is_admin_or_owner:
            raise Forbidden()
        try:
            DatasetService.check_dataset_permission(dataset, current_user)
        except services.errors.account.NoPermissionError as e:
            raise Forbidden(str(e))
        SegmentService.delete_segment(segment, document, dataset)
        return {'result': 'success'}, 200


class DatasetDocumentSegmentBatchImportApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @cloud_edition_billing_resource_check('vector_space')
    @cloud_edition_billing_knowledge_limit_check('add_segment')
    def post(self, dataset_id, document_id):
        # check dataset
        dataset_id = str(dataset_id)
        dataset = DatasetService.get_dataset(dataset_id)
        if not dataset:
            raise NotFound('Dataset not found.')
        # check document
        document_id = str(document_id)
        document = DocumentService.get_document(dataset_id, document_id)
        if not document:
            raise NotFound('Document not found.')
        # check file
        if 'file' not in request.files:
            raise NoFileUploadedError()

        if len(request.files) > 1:
            raise TooManyFilesError()
        # get file from request
        file = request.files['file']
        # check file type
        if not file.filename.endswith('.csv'):
            raise ValueError("Invalid file type. Only CSV files are allowed")

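        # Expected CSV layout: the first column holds the segment content and,
        # for Q&A-form documents, the second column holds the answer.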
        try:
            # pandas reads the first row as the header, so it is not imported as data
            df = pd.read_csv(file)
            result = []
            for index, row in df.iterrows():
                if document.doc_form == 'qa_model':
                    data = {'content': row[0], 'answer': row[1]}
                else:
                    data = {'content': row[0]}
                result.append(data)
            if len(result) == 0:
                raise ValueError("The CSV file is empty.")
            # async job
            job_id = str(uuid.uuid4())
            indexing_cache_key = 'segment_batch_import_{}'.format(str(job_id))
            # send batch add segments task
            redis_client.setnx(indexing_cache_key, 'waiting')
            batch_create_segment_to_index_task.delay(str(job_id), result, dataset_id, document_id,
                                                     current_user.current_tenant_id, current_user.id)
        except Exception as e:
            return {'error': str(e)}, 500
        return {
            'job_id': job_id,
            'job_status': 'waiting'
        }, 200

    @setup_required
    @login_required
    @account_initialization_required
    def get(self, job_id):
        job_id = str(job_id)
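        # The job status is read back from the Redis key written when the batch import was submitted.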
        indexing_cache_key = 'segment_batch_import_{}'.format(job_id)
        cache_result = redis_client.get(indexing_cache_key)
        if cache_result is None:
            raise ValueError("The job does not exist.")

        return {
            'job_id': job_id,
            'job_status': cache_result.decode()
        }, 200


api.add_resource(DatasetDocumentSegmentListApi,
                 '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments')
api.add_resource(DatasetDocumentSegmentApi,
                 '/datasets/<uuid:dataset_id>/segments/<uuid:segment_id>/<string:action>')
api.add_resource(DatasetDocumentSegmentAddApi,
                 '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segment')
api.add_resource(DatasetDocumentSegmentUpdateApi,
                 '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/<uuid:segment_id>')
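# DatasetDocumentSegmentBatchImportApi serves both the CSV upload route (POST) and the job status route (GET).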
api.add_resource(DatasetDocumentSegmentBatchImportApi,
                 '/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/segments/batch_import',
                 '/datasets/batch_import_status/<uuid:job_id>')