Mirror of https://github.com/langgenius/dify.git (synced 2025-11-11 08:53:17 +00:00)
feat(datasource): change datasource result type to event-stream
This commit is contained in:
parent 02ae479636
commit 82d0a70cb4
@@ -213,10 +213,11 @@ class OnlineDocumentPage(BaseModel):
     """
 
     page_id: str = Field(..., description="The page id")
-    page_title: str = Field(..., description="The page title")
+    page_name: str = Field(..., description="The page title")
     page_icon: Optional[dict] = Field(None, description="The page icon")
     type: str = Field(..., description="The type of the page")
     last_edited_time: str = Field(..., description="The last edited time")
+    parent_id: Optional[str] = Field(None, description="The parent page id")
 
 
 class OnlineDocumentInfo(BaseModel):
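Note (illustration only, not part of the commit): constructing the updated OnlineDocumentPage with the renamed page_name field and the new optional parent_id would look like this, with made-up values:

    # Hypothetical values; fields as declared in the hunk above.
    page = OnlineDocumentPage(
        page_id="p-123",
        page_name="Quarterly Report",  # renamed from page_title
        page_icon=None,
        type="page",
        last_edited_time="2025-01-01T00:00:00Z",
        parent_id=None,  # new optional field
    )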
@@ -135,7 +135,7 @@ class PluginDatasourceManager(BasePluginClient):
 
         datasource_provider_id = GenericProviderID(datasource_provider)
 
-        response = self._request_with_plugin_daemon_response_stream(
+        return self._request_with_plugin_daemon_response_stream(
             "POST",
             f"plugin/{tenant_id}/dispatch/datasource/get_online_document_pages",
             OnlineDocumentPagesMessage,
@@ -153,7 +153,6 @@ class PluginDatasourceManager(BasePluginClient):
                 "Content-Type": "application/json",
             },
         )
-        yield from response
 
     def get_online_document_page_content(
         self,
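Note (illustration only): replacing `yield from response` with a direct `return` means get_online_document_pages is no longer a generator function itself; the daemon request is issued when the method is called rather than on first iteration, though callers that simply iterate the result see the same items. A minimal sketch of that difference with a plain generator:

    def stream():
        yield from (1, 2, 3)

    def returns_stream():
        # like the new code: not a generator function; the body runs at
        # call time and hands back the underlying iterator
        return stream()

    def yields_stream():
        # like the old code: a generator function; the body runs only
        # once the caller starts iterating
        yield from stream()

    assert list(returns_stream()) == list(yields_stream()) == [1, 2, 3]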
@@ -11,11 +11,16 @@ class DatasourceStreamEvent(Enum):
     """
 
     PROCESSING = "datasource_processing"
    COMPLETED = "datasource_completed"
+    ERROR = "datasource_error"
 
 
 class BaseDatasourceEvent(BaseModel):
     pass
 
 
+class DatasourceErrorEvent(BaseDatasourceEvent):
+    event: str = DatasourceStreamEvent.ERROR.value
+    error: str = Field(..., description="error message")
+
 
 class DatasourceCompletedEvent(BaseDatasourceEvent):
     event: str = DatasourceStreamEvent.COMPLETED.value
     data: Mapping[str,Any] | list = Field(..., description="result")
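Note (illustration only, shapes assumed from this diff): each event serializes to a dict discriminated by its "event" field, which is what lets a single stream carry progress, results, and failures. For the new error event:

    DatasourceErrorEvent(error="request timed out").model_dump()
    # -> {"event": "datasource_error", "error": "request timed out"}

The processing and completed events follow the same pattern under the "datasource_processing" and "datasource_completed" values.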
@@ -1,4 +1,5 @@
 import json
+import logging
 import re
 import threading
 import time
@@ -21,7 +22,12 @@ from core.datasource.entities.datasource_entities import (
 )
 from core.datasource.online_document.online_document_plugin import OnlineDocumentDatasourcePlugin
 from core.datasource.website_crawl.website_crawl_plugin import WebsiteCrawlDatasourcePlugin
-from core.rag.entities.event import BaseDatasourceEvent, DatasourceCompletedEvent, DatasourceProcessingEvent
+from core.rag.entities.event import (
+    BaseDatasourceEvent,
+    DatasourceCompletedEvent,
+    DatasourceErrorEvent,
+    DatasourceProcessingEvent,
+)
 from core.repositories.sqlalchemy_workflow_node_execution_repository import SQLAlchemyWorkflowNodeExecutionRepository
 from core.variables.variables import Variable
 from core.workflow.entities.node_entities import NodeRunResult
@@ -61,6 +67,7 @@ from services.entities.knowledge_entities.rag_pipeline_entities import (
 from services.errors.app import WorkflowHashNotEqualError
 from services.rag_pipeline.pipeline_template.pipeline_template_factory import PipelineTemplateRetrievalFactory
 
+logger = logging.getLogger(__name__)
 
 
 class RagPipelineService:
     @classmethod
@@ -430,93 +437,112 @@ class RagPipelineService:
         """
         Run published workflow datasource
         """
-        if is_published:
-            # fetch published workflow by app_model
-            workflow = self.get_published_workflow(pipeline=pipeline)
-        else:
-            workflow = self.get_draft_workflow(pipeline=pipeline)
-        if not workflow:
-            raise ValueError("Workflow not initialized")
-
-        # run draft workflow node
-        datasource_node_data = None
-        start_at = time.perf_counter()
-        datasource_nodes = workflow.graph_dict.get("nodes", [])
-        for datasource_node in datasource_nodes:
-            if datasource_node.get("id") == node_id:
-                datasource_node_data = datasource_node.get("data", {})
-                break
-        if not datasource_node_data:
-            raise ValueError("Datasource node data not found")
-
-        datasource_parameters = datasource_node_data.get("datasource_parameters", {})
-        for key, value in datasource_parameters.items():
-            if not user_inputs.get(key):
-                user_inputs[key] = value["value"]
-
-        from core.datasource.datasource_manager import DatasourceManager
-
-        datasource_runtime = DatasourceManager.get_datasource_runtime(
-            provider_id=f"{datasource_node_data.get('plugin_id')}/{datasource_node_data.get('provider_name')}",
-            datasource_name=datasource_node_data.get("datasource_name"),
-            tenant_id=pipeline.tenant_id,
-            datasource_type=DatasourceProviderType(datasource_type),
-        )
-        datasource_provider_service = DatasourceProviderService()
-        credentials = datasource_provider_service.get_real_datasource_credentials(
-            tenant_id=pipeline.tenant_id,
-            provider=datasource_node_data.get("provider_name"),
-            plugin_id=datasource_node_data.get("plugin_id"),
-        )
-        if credentials:
-            datasource_runtime.runtime.credentials = credentials[0].get("credentials")
-        match datasource_type:
-            case DatasourceProviderType.ONLINE_DOCUMENT:
-                datasource_runtime = cast(OnlineDocumentDatasourcePlugin, datasource_runtime)
-                online_document_result: Generator[OnlineDocumentPagesMessage, None, None] = (
-                    datasource_runtime.get_online_document_pages(
-                        user_id=account.id,
-                        datasource_parameters=user_inputs,
-                        provider_type=datasource_runtime.datasource_provider_type(),
-                    )
-                )
-                start_time = time.time()
-                for message in online_document_result:
-                    end_time = time.time()
-                    online_document_event = DatasourceCompletedEvent(
-                        data=message.result,
-                        time_consuming=round(end_time - start_time, 2)
-                    )
-                    yield online_document_event.model_dump()
-
-            case DatasourceProviderType.WEBSITE_CRAWL:
-                datasource_runtime = cast(WebsiteCrawlDatasourcePlugin, datasource_runtime)
-                website_crawl_result: Generator[WebsiteCrawlMessage, None, None] = datasource_runtime.get_website_crawl(
-                    user_id=account.id,
-                    datasource_parameters=user_inputs,
-                    provider_type=datasource_runtime.datasource_provider_type(),
-                )
-                start_time = time.time()
-                try:
-                    for message in website_crawl_result:
-                        end_time = time.time()
-                        if message.result.status == "completed":
-                            crawl_event = DatasourceCompletedEvent(
-                                data=message.result.web_info_list,
-                                total=message.result.total,
-                                completed=message.result.completed,
-                                time_consuming=round(end_time - start_time, 2)
-                            )
-                        else:
-                            crawl_event = DatasourceProcessingEvent(
-                                total=message.result.total,
-                                completed=message.result.completed,
-                            )
-                        yield crawl_event.model_dump()
-                except Exception as e:
-                    print(str(e))
-            case _:
-                raise ValueError(f"Unsupported datasource provider: {datasource_runtime.datasource_provider_type}")
+        try:
+            if is_published:
+                # fetch published workflow by app_model
+                workflow = self.get_published_workflow(pipeline=pipeline)
+            else:
+                workflow = self.get_draft_workflow(pipeline=pipeline)
+            if not workflow:
+                raise ValueError("Workflow not initialized")
+
+            # run draft workflow node
+            datasource_node_data = None
+            datasource_nodes = workflow.graph_dict.get("nodes", [])
+            for datasource_node in datasource_nodes:
+                if datasource_node.get("id") == node_id:
+                    datasource_node_data = datasource_node.get("data", {})
+                    break
+            if not datasource_node_data:
+                raise ValueError("Datasource node data not found")
+
+            datasource_parameters = datasource_node_data.get("datasource_parameters", {})
+            for key, value in datasource_parameters.items():
+                if not user_inputs.get(key):
+                    user_inputs[key] = value["value"]
+
+            from core.datasource.datasource_manager import DatasourceManager
+
+            datasource_runtime = DatasourceManager.get_datasource_runtime(
+                provider_id=f"{datasource_node_data.get('plugin_id')}/{datasource_node_data.get('provider_name')}",
+                datasource_name=datasource_node_data.get("datasource_name"),
+                tenant_id=pipeline.tenant_id,
+                datasource_type=DatasourceProviderType(datasource_type),
+            )
+            datasource_provider_service = DatasourceProviderService()
+            credentials = datasource_provider_service.get_real_datasource_credentials(
+                tenant_id=pipeline.tenant_id,
+                provider=datasource_node_data.get("provider_name"),
+                plugin_id=datasource_node_data.get("plugin_id"),
+            )
+            if credentials:
+                datasource_runtime.runtime.credentials = credentials[0].get("credentials")
+            match datasource_type:
+                case DatasourceProviderType.ONLINE_DOCUMENT:
+                    datasource_runtime = cast(OnlineDocumentDatasourcePlugin, datasource_runtime)
+                    online_document_result: Generator[OnlineDocumentPagesMessage, None, None] = (
+                        datasource_runtime.get_online_document_pages(
+                            user_id=account.id,
+                            datasource_parameters=user_inputs,
+                            provider_type=datasource_runtime.datasource_provider_type(),
+                        )
+                    )
+                    start_time = time.time()
+                    start_event = DatasourceProcessingEvent(
+                        total=0,
+                        completed=0,
+                    )
+                    yield start_event.model_dump()
+                    try:
+                        for message in online_document_result:
+                            end_time = time.time()
+                            online_document_event = DatasourceCompletedEvent(
+                                data=message.result,
+                                time_consuming=round(end_time - start_time, 2)
+                            )
+                            yield online_document_event.model_dump()
+                    except Exception as e:
+                        logger.exception("Error during online document.")
+                        yield DatasourceErrorEvent(
+                            error=str(e)
+                        ).model_dump()
+                case DatasourceProviderType.WEBSITE_CRAWL:
+                    datasource_runtime = cast(WebsiteCrawlDatasourcePlugin, datasource_runtime)
+                    website_crawl_result: Generator[WebsiteCrawlMessage, None, None] = (
+                        datasource_runtime.get_website_crawl(
+                            user_id=account.id,
+                            datasource_parameters=user_inputs,
+                            provider_type=datasource_runtime.datasource_provider_type(),
+                        ))
+                    start_time = time.time()
+                    try:
+                        for message in website_crawl_result:
+                            end_time = time.time()
+                            if message.result.status == "completed":
+                                crawl_event = DatasourceCompletedEvent(
+                                    data=message.result.web_info_list,
+                                    total=message.result.total,
+                                    completed=message.result.completed,
+                                    time_consuming=round(end_time - start_time, 2)
+                                )
+                            else:
+                                crawl_event = DatasourceProcessingEvent(
+                                    total=message.result.total,
+                                    completed=message.result.completed,
+                                )
+                            yield crawl_event.model_dump()
+                    except Exception as e:
+                        logger.exception("Error during website crawl.")
+                        yield DatasourceErrorEvent(
+                            error=str(e)
+                        ).model_dump()
+                case _:
+                    raise ValueError(f"Unsupported datasource provider: {datasource_runtime.datasource_provider_type}")
+        except Exception as e:
+            logger.exception("Error in run_datasource_workflow_node.")
+            yield DatasourceErrorEvent(
+                error=str(e)
+            ).model_dump()
 
     def run_free_workflow_node(
         self, node_data: dict, tenant_id: str, user_id: str, node_id: str, user_inputs: dict[str, Any]
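Note (hypothetical consumer, not part of this commit): with run_datasource_workflow_node now yielding event dicts, a controller can forward them as a server-sent event stream, which is what the commit title means by changing the result type to event-stream. A minimal sketch assuming a Flask-style response (the function name and wiring are invented for illustration):

    import json

    from flask import Response

    def stream_datasource_events(rag_pipeline_service, **kwargs) -> Response:
        def generate():
            # each item is a dict such as {"event": "datasource_processing", ...}
            # or {"event": "datasource_error", "error": "..."}
            for event in rag_pipeline_service.run_datasource_workflow_node(**kwargs):
                yield f"data: {json.dumps(event)}\n\n"

        return Response(generate(), mimetype="text/event-stream")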