chore: adapt deepset cloud sdk endpoint format for saving pipelines (#5969)

* chore: adapt to new endpoints formats

* docs: add release notes
This commit is contained in:
Tobias Wochinger 2023-10-05 08:56:28 +02:00 committed by GitHub
parent c2ec3f5fde
commit d5d3a9eef4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 80 additions and 8 deletions

View File

@ -1,12 +1,11 @@
import json
from mimetypes import guess_type
from pathlib import Path
from typing import Any, Dict, Generator, List, Optional, Tuple, Union, Literal
import logging
import os
import time
from enum import Enum
from mimetypes import guess_type
from pathlib import Path
from typing import Any, Dict, Generator, List, Literal, Optional, Tuple, Union
import pandas as pd
import requests
@ -560,10 +559,11 @@ class PipelineClient:
:param workspace: Specifies the name of the workspace on deepset Cloud.
:param headers: Headers to pass to the API call.
"""
config["name"] = pipeline_config_name
workspace_url = self._build_workspace_url(workspace=workspace)
pipelines_url = f"{workspace_url}/pipelines"
response = self.client.post(url=pipelines_url, data=yaml.dump(config), headers=headers).json()
response = self.client.post(
url=pipelines_url, json={"name": pipeline_config_name, "config": yaml.dump(config)}, headers=headers
).json()
if "name" not in response or response["name"] != pipeline_config_name:
logger.warning("Unexpected response from saving pipeline config: %s", response)
@ -582,7 +582,6 @@ class PipelineClient:
:param workspace: Specifies the name of the workspace on deepset Cloud.
:param headers: Headers to pass to the API call.
"""
config["name"] = pipeline_config_name
pipeline_url = self._build_pipeline_url(workspace=workspace, pipeline_config_name=pipeline_config_name)
yaml_url = f"{pipeline_url}/yaml"
response = self.client.put(url=yaml_url, data=yaml.dump(config), headers=headers).json()

View File

@ -0,0 +1,4 @@
---
enhancements:
- |
  Update the deepset Cloud SDK to the new endpoint format for saving pipeline configs.

View File

@ -1,6 +1,5 @@
{
"version": "ignore",
"name": "document_retrieval_1",
"components": [
{
"name": "DocumentStore",

View File

@ -0,0 +1,70 @@
import json
from pathlib import Path
from typing import Any, Dict
from unittest.mock import Mock
import pytest
import yaml
from haystack.utils.deepsetcloud import DeepsetCloudClient, PipelineClient
@pytest.fixture
def pipeline_config(samples_path: Path) -> Dict[str, Any]:
    """Load the sample pipeline config shared by the save/update tests."""
    config_file = samples_path / "dc" / "pipeline_config.json"
    return json.loads(config_file.read_text())
@pytest.fixture()
def mocked_client() -> Mock:
    """Provide a DeepsetCloudClient mock whose workspace URL resolves to a fixed host."""
    client_mock = Mock(spec=DeepsetCloudClient)
    client_mock.build_workspace_url.return_value = "https://dc"
    return client_mock
@pytest.fixture()
def mock_success_response() -> Mock:
    """Simulate a successful API response whose JSON body echoes the pipeline name."""
    response = Mock()
    response.json = Mock(return_value={"name": "test_pipeline"})
    return response
class TestSaveConfig:
    """Tests for PipelineClient.save_pipeline_config against the new endpoint format."""

    def test_save_config(
        self, pipeline_config: Dict[str, Any], mocked_client: Mock, mock_success_response: Mock
    ) -> None:
        """Saving a config must POST the name and YAML-dumped config as a JSON payload."""
        mocked_client.post.return_value = mock_success_response
        client_under_test = PipelineClient(client=mocked_client)

        client_under_test.save_pipeline_config(
            config=pipeline_config, pipeline_config_name="test_pipeline", workspace="test_workspace"
        )

        mocked_client.post.assert_called_once_with(
            url="https://dc/pipelines",
            json={"name": "test_pipeline", "config": yaml.dump(pipeline_config)},
            headers=None,
        )
class TestUpdateConfig:
    """Tests for PipelineClient.update_pipeline_config (PUT to the per-pipeline yaml endpoint)."""

    def test_update_config(
        self, pipeline_config: Dict[str, Any], mocked_client: Mock, mock_success_response: Mock
    ) -> None:
        """Updating a config must PUT the YAML-dumped config to the pipeline's yaml URL."""
        mocked_client.put.return_value = mock_success_response
        client_under_test = PipelineClient(client=mocked_client)

        client_under_test.update_pipeline_config(
            config=pipeline_config, pipeline_config_name="test_pipeline", workspace="test_workspace"
        )

        mocked_client.put.assert_called_once_with(
            url="https://dc/pipelines/test_pipeline/yaml",
            data=yaml.dump(pipeline_config),
            headers=None,
        )