| 
									
										
										
										
											2023-09-12 10:26:12 +08:00
										 |  |  | import logging | 
					
						
							| 
									
										
										
										
											2025-06-06 10:48:28 +08:00
										 |  |  | import secrets | 
					
						
							| 
									
										
										
										
											2025-02-17 17:05:13 +08:00
										 |  |  | from typing import cast | 
					
						
							| 
									
										
										
										
											2023-09-12 10:26:12 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2024-04-08 18:51:46 +08:00
										 |  |  | from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity | 
					
						
							| 
									
										
										
										
											2025-02-17 17:05:13 +08:00
										 |  |  | from core.entities import DEFAULT_PLUGIN_ID | 
					
						
							|  |  |  | from core.model_runtime.entities.model_entities import ModelType | 
					
						
							| 
									
										
										
										
											2024-01-02 23:42:00 +08:00
										 |  |  | from core.model_runtime.errors.invoke import InvokeBadRequestError | 
					
						
							| 
									
										
										
										
											2025-02-17 17:05:13 +08:00
										 |  |  | from core.model_runtime.model_providers.__base.moderation_model import ModerationModel | 
					
						
							|  |  |  | from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory | 
					
						
							| 
									
										
										
										
											2024-01-02 23:42:00 +08:00
										 |  |  | from extensions.ext_hosting_provider import hosting_configuration | 
					
						
							| 
									
										
										
										
											2023-09-12 10:26:12 +08:00
										 |  |  | from models.provider import ProviderType | 
					
						
							|  |  |  | 
 | 
					
						
# Module-level logger named after this module (standard `logging` convention,
# so handlers/levels can be configured per-package).
logger = logging.getLogger(__name__)
 | 
					
						
							|  |  |  | 
 | 
					
						
							| 
									
										
										
										
											2025-02-17 17:05:13 +08:00
										 |  |  | def check_moderation(tenant_id: str, model_config: ModelConfigWithCredentialsEntity, text: str) -> bool: | 
					
						
							| 
									
										
										
										
											2024-01-02 23:42:00 +08:00
										 |  |  |     moderation_config = hosting_configuration.moderation_config | 
					
						
							| 
									
										
										
										
											2025-02-17 17:05:13 +08:00
										 |  |  |     openai_provider_name = f"{DEFAULT_PLUGIN_ID}/openai/openai" | 
					
						
							| 
									
										
										
										
											2024-09-10 17:00:20 +08:00
										 |  |  |     if ( | 
					
						
							|  |  |  |         moderation_config | 
					
						
							|  |  |  |         and moderation_config.enabled is True | 
					
						
							| 
									
										
										
										
											2025-02-17 17:05:13 +08:00
										 |  |  |         and openai_provider_name in hosting_configuration.provider_map | 
					
						
							|  |  |  |         and hosting_configuration.provider_map[openai_provider_name].enabled is True | 
					
						
							| 
									
										
										
										
											2024-01-02 23:42:00 +08:00
										 |  |  |     ): | 
					
						
							|  |  |  |         using_provider_type = model_config.provider_model_bundle.configuration.using_provider_type | 
					
						
							|  |  |  |         provider_name = model_config.provider | 
					
						
							| 
									
										
										
										
											2024-09-10 17:00:20 +08:00
										 |  |  |         if using_provider_type == ProviderType.SYSTEM and provider_name in moderation_config.providers: | 
					
						
							| 
									
										
										
										
											2025-02-17 17:05:13 +08:00
										 |  |  |             hosting_openai_config = hosting_configuration.provider_map[openai_provider_name] | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |             if hosting_openai_config.credentials is None: | 
					
						
							|  |  |  |                 return False | 
					
						
							| 
									
										
										
										
											2023-09-12 10:26:12 +08:00
										 |  |  | 
 | 
					
						
							|  |  |  |             # 2000 text per chunk | 
					
						
							|  |  |  |             length = 2000 | 
					
						
							| 
									
										
										
										
											2024-09-10 17:00:20 +08:00
										 |  |  |             text_chunks = [text[i : i + length] for i in range(0, len(text), length)] | 
					
						
							| 
									
										
										
										
											2023-09-18 17:32:31 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2023-10-11 17:11:20 +08:00
										 |  |  |             if len(text_chunks) == 0: | 
					
						
							|  |  |  |                 return True | 
					
						
							| 
									
										
										
										
											2023-09-18 17:32:31 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2025-06-06 10:48:28 +08:00
										 |  |  |             text_chunk = secrets.choice(text_chunks) | 
					
						
							| 
									
										
										
										
											2023-09-18 17:32:31 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2023-10-11 17:11:20 +08:00
										 |  |  |             try: | 
					
						
							| 
									
										
										
										
											2025-02-17 17:05:13 +08:00
										 |  |  |                 model_provider_factory = ModelProviderFactory(tenant_id) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |                 # Get model instance of LLM | 
					
						
							|  |  |  |                 model_type_instance = model_provider_factory.get_model_type_instance( | 
					
						
							|  |  |  |                     provider=openai_provider_name, model_type=ModelType.MODERATION | 
					
						
							|  |  |  |                 ) | 
					
						
							|  |  |  |                 model_type_instance = cast(ModerationModel, model_type_instance) | 
					
						
							| 
									
										
										
										
											2024-01-02 23:42:00 +08:00
										 |  |  |                 moderation_result = model_type_instance.invoke( | 
					
						
							| 
									
										
										
										
											2025-02-17 17:05:13 +08:00
										 |  |  |                     model="omni-moderation-latest", credentials=hosting_openai_config.credentials, text=text_chunk | 
					
						
							| 
									
										
										
										
											2024-01-02 23:42:00 +08:00
										 |  |  |                 ) | 
					
						
							|  |  |  | 
 | 
					
						
							|  |  |  |                 if moderation_result is True: | 
					
						
							|  |  |  |                     return True | 
					
						
							| 
									
										
										
										
											2025-02-17 17:05:13 +08:00
										 |  |  |             except Exception: | 
					
						
							| 
									
										
										
										
											2024-11-15 15:41:40 +08:00
										 |  |  |                 logger.exception(f"Fails to check moderation, provider_name: {provider_name}") | 
					
						
							| 
									
										
										
										
											2024-09-10 17:00:20 +08:00
										 |  |  |                 raise InvokeBadRequestError("Rate limit exceeded, please try again later.") | 
					
						
							| 
									
										
										
										
											2023-09-12 10:26:12 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2024-01-02 23:42:00 +08:00
										 |  |  |     return False |