diff --git a/api/core/app/apps/pipeline/pipeline_generator.py b/api/core/app/apps/pipeline/pipeline_generator.py
index c9f1f5e813..e7ac4ad883 100644
--- a/api/core/app/apps/pipeline/pipeline_generator.py
+++ b/api/core/app/apps/pipeline/pipeline_generator.py
@@ -141,6 +141,7 @@ class PipelineGenerator(BaseAppGenerator):
                 document_id=document_id,
                 datasource_type=datasource_type,
                 datasource_info=json.dumps(datasource_info),
+                datasource_node_id=start_node_id,
                 input_data=inputs,
                 pipeline_id=pipeline.id,
                 created_by=user.id,
diff --git a/api/core/plugin/impl/datasource.py b/api/core/plugin/impl/datasource.py
index bc32c9283d..a501a03332 100644
--- a/api/core/plugin/impl/datasource.py
+++ b/api/core/plugin/impl/datasource.py
@@ -180,7 +180,7 @@ class PluginDatasourceManager(BasePluginClient):
                     "provider": datasource_provider_id.provider_name,
                     "datasource": datasource_name,
                     "credentials": credentials,
-                    "page": datasource_parameters,
+                    "page": datasource_parameters.model_dump(),
                 },
             },
             headers={
diff --git a/api/core/workflow/nodes/datasource/datasource_node.py b/api/core/workflow/nodes/datasource/datasource_node.py
index 17108e0a57..5c1d8523ff 100644
--- a/api/core/workflow/nodes/datasource/datasource_node.py
+++ b/api/core/workflow/nodes/datasource/datasource_node.py
@@ -29,6 +29,7 @@ from core.workflow.utils.variable_template_parser import VariableTemplateParser
 from extensions.ext_database import db
 from factories import file_factory
 from models.model import UploadFile
+from services.datasource_provider_service import DatasourceProviderService
 
 from ...entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey
 from .entities import DatasourceNodeData
@@ -100,13 +101,21 @@ class DatasourceNode(BaseNode[DatasourceNodeData]):
             match datasource_type:
                 case DatasourceProviderType.ONLINE_DOCUMENT:
                     datasource_runtime = cast(OnlineDocumentDatasourcePlugin, datasource_runtime)
+                    datasource_provider_service = DatasourceProviderService()
+                    credentials = datasource_provider_service.get_real_datasource_credentials(
+                        tenant_id=self.tenant_id,
+                        provider=node_data.provider_name,
+                        plugin_id=node_data.plugin_id,
+                    )
+                    if credentials:
+                        datasource_runtime.runtime.credentials = credentials[0].get("credentials")
                     online_document_result: Generator[DatasourceMessage, None, None] = (
                         datasource_runtime.get_online_document_page_content(
                             user_id=self.user_id,
                             datasource_parameters=GetOnlineDocumentPageContentRequest(
                                 workspace_id=datasource_info.get("workspace_id"),
                                 page_id=datasource_info.get("page").get("page_id"),
-                                type=datasource_info.get("type"),
+                                type=datasource_info.get("page").get("type"),
                             ),
                             provider_type=datasource_type,
                         )
diff --git a/api/migrations/versions/2025_06_19_1525-a1025f709c06_add_pipeline_info_8.py b/api/migrations/versions/2025_06_19_1525-a1025f709c06_add_pipeline_info_8.py
new file mode 100644
index 0000000000..387aff54b0
--- /dev/null
+++ b/api/migrations/versions/2025_06_19_1525-a1025f709c06_add_pipeline_info_8.py
@@ -0,0 +1,33 @@
+"""add_pipeline_info_8
+
+Revision ID: a1025f709c06
+Revises: 70a0fc0c013f
+Create Date: 2025-06-19 15:25:41.263120
+
+"""
+from alembic import op
+import models as models
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'a1025f709c06'
+down_revision = '70a0fc0c013f'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op:
+        batch_op.add_column(sa.Column('datasource_node_id', sa.String(length=255), nullable=False))
+
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('document_pipeline_execution_logs', schema=None) as batch_op:
+        batch_op.drop_column('datasource_node_id')
+
+    # ### end Alembic commands ###
diff --git a/api/models/dataset.py b/api/models/dataset.py
index 16d1865a83..e8da241d26 100644
--- a/api/models/dataset.py
+++ b/api/models/dataset.py
@@ -1267,6 +1267,7 @@ class DocumentPipelineExecutionLog(Base):
     document_id = db.Column(StringUUID, nullable=False)
     datasource_type = db.Column(db.String(255), nullable=False)
     datasource_info = db.Column(db.Text, nullable=False)
+    datasource_node_id = db.Column(db.String(255), nullable=False)
     input_data = db.Column(db.JSON, nullable=False)
     created_by = db.Column(StringUUID, nullable=True)
     created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())