Mirror of https://github.com/langgenius/dify.git (synced 2025-07-08 09:41:29 +00:00)

Commit a8b676ade0 ("r2"), parent 0d9991ec88
@@ -1,3 +1,5 @@
+import random
+
 from flask import redirect, request
 from flask_login import current_user  # type: ignore
 from flask_restful import (  # type: ignore
@@ -109,7 +111,7 @@ class DatasourceAuth(Resource):
                 provider=args["provider"],
                 plugin_id=args["plugin_id"],
                 credentials=args["credentials"],
-                name=args["name"],
+                name="test" + str(random.randint(1, 1000000)),
             )
         except CredentialsValidateFailedError as ex:
             raise ValueError(str(ex))
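The only functional change in this hunk is the credential name: the caller-supplied args["name"] is replaced by a generated test name. A minimal sketch of that naming scheme, with make_test_name as a hypothetical helper name (the commit inlines the expression directly):

import random

def make_test_name() -> str:
    # Mirrors the inlined expression in the diff: "test" plus a random
    # integer suffix, presumably to avoid unique-name collisions while testing.
    return "test" + str(random.randint(1, 1000000))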
@@ -1,4 +1,5 @@
 import base64
+from datetime import datetime
 import hashlib
 import hmac
 import logging
@@ -91,6 +92,7 @@ class DatasourceFileManager:
             used=False,
             hash=hashlib.sha3_256(file_binary).hexdigest(),
             source_url="",
+            created_at=datetime.now(),
         )

         db.session.add(upload_file)
@@ -138,6 +140,7 @@ class DatasourceFileManager:
             used=False,
             hash=hashlib.sha3_256(blob).hexdigest(),
             source_url=file_url,
+            created_at=datetime.now(),
         )

         db.session.add(upload_file)
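Both DatasourceFileManager hunks add the same field: a created_at=datetime.now() stamp next to the existing SHA3-256 content hash. A self-contained sketch of the record-building pattern; the plain dict stands in for the real UploadFile model, which this diff does not show:

import hashlib
from datetime import datetime

def build_upload_row(blob: bytes, source_url: str = "") -> dict:
    # Same field pattern as the diff: a content-addressed SHA3-256 hash plus
    # an explicit creation timestamp captured at insert time.
    return {
        "used": False,
        "hash": hashlib.sha3_256(blob).hexdigest(),
        "source_url": source_url,
        "created_at": datetime.now(),
    }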
@@ -147,10 +147,12 @@ class DatasourceNode(BaseNode[DatasourceNodeData]):
                             provider_type=datasource_type,
                         )
                     )
-                    yield from self._transform_message(
+                    yield from self._transform_datasource_file_message(
                         messages=online_drive_result,
                         parameters_for_log=parameters_for_log,
                         datasource_info=datasource_info,
+                        variable_pool=variable_pool,
+                        datasource_type=datasource_type,
                     )
                 case DatasourceProviderType.WEBSITE_CRAWL:
                     yield RunCompletedEvent(
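Beyond the rename, the call site now threads through the variable_pool (so the handler can publish the resulting file as a node output) and the datasource_type (so it can report it in the run result); the new method itself appears in the next hunk. A toy sketch of how such a pool maps [node_id, key] selectors to values; this is an illustrative stand-in, not the real VariablePool API beyond the add call visible in the diff:

class ToyVariablePool:
    """Hypothetical stand-in for the VariablePool passed above."""

    def __init__(self) -> None:
        self._store: dict[tuple[str, ...], object] = {}

    def add(self, selector: list[str], value: object) -> None:
        # e.g. pool.add([node_id, "file"], [file]), as in the next hunk
        self._store[tuple(selector)] = value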
@@ -466,3 +468,72 @@ class DatasourceNode(BaseNode[DatasourceNodeData]):
     @classmethod
     def version(cls) -> str:
         return "1"
+
+    def _transform_datasource_file_message(
+        self,
+        messages: Generator[DatasourceMessage, None, None],
+        parameters_for_log: dict[str, Any],
+        datasource_info: dict[str, Any],
+        variable_pool: VariablePool,
+        datasource_type: DatasourceProviderType,
+    ) -> Generator:
+        """
+        Transform datasource file messages into a File and yield a RunCompletedEvent.
+        """
+        # transform message and handle file storage
+        message_stream = DatasourceFileMessageTransformer.transform_datasource_invoke_messages(
+            messages=messages,
+            user_id=self.user_id,
+            tenant_id=self.tenant_id,
+            conversation_id=None,
+        )
+        file = None
+        for message in message_stream:
+            if message.type == DatasourceMessage.MessageType.BINARY_LINK:
+                assert isinstance(message.message, DatasourceMessage.TextMessage)
+
+                url = message.message.text
+                if message.meta:
+                    transfer_method = message.meta.get("transfer_method", FileTransferMethod.DATASOURCE_FILE)
+                else:
+                    transfer_method = FileTransferMethod.DATASOURCE_FILE
+
+                datasource_file_id = str(url).split("/")[-1].split(".")[0]
+
+                with Session(db.engine) as session:
+                    stmt = select(UploadFile).where(UploadFile.id == datasource_file_id)
+                    datasource_file = session.scalar(stmt)
+                    if datasource_file is None:
+                        raise ToolFileError(f"Tool file {datasource_file_id} does not exist")
+
+                mapping = {
+                    "datasource_file_id": datasource_file_id,
+                    "type": file_factory.get_file_type_by_mime_type(datasource_file.mime_type),
+                    "transfer_method": transfer_method,
+                    "url": url,
+                }
+                file = file_factory.build_from_mapping(
+                    mapping=mapping,
+                    tenant_id=self.tenant_id,
+                )
+        variable_pool.add([self.node_id, "file"], [file])
+        for key, value in datasource_info.items():
+            # construct new key list
+            new_key_list = ["file", key]
+            self._append_variables_recursively(
+                variable_pool=variable_pool,
+                node_id=self.node_id,
+                variable_key_list=new_key_list,
+                variable_value=value,
+            )
+        yield RunCompletedEvent(
+            run_result=NodeRunResult(
+                status=WorkflowNodeExecutionStatus.SUCCEEDED,
+                inputs=parameters_for_log,
+                metadata={WorkflowNodeExecutionMetadataKey.DATASOURCE_INFO: datasource_info},
+                outputs={
+                    "file_info": datasource_info,
+                    "datasource_type": datasource_type,
+                },
+            )
+        )