Fixes: Databricks httpPath Required (#20611)

* fix: made Databricks httpPath required and added a migration file for it

* fix: added SQL migration in postDataMigration file and fixed Databricks tests

* fix: added httpPath to test_source_connection.py and test_source_parsing.py files

* fix: added httpPath to test_databricks_lineage.py

* fix: table name in Postgres migration
Keshav Mohta 2025-04-07 13:33:55 +05:30 committed by GitHub
parent d10bccceca
commit 0796c6274b
8 changed files with 60 additions and 9 deletions

View File

@@ -1,3 +1,17 @@
UPDATE test_case
SET json = json_set(json, '$.createdBy', json->>'$.updatedBy')
WHERE json->>'$.createdBy' IS NULL;
-- Made httpPath a required field for Databricks, updating records
-- where httpPath is NULL or missing to an empty string.
UPDATE
dbservice_entity
SET
json = JSON_SET(json, '$.connection.config.httpPath', '')
WHERE
serviceType = 'Databricks'
AND (
JSON_CONTAINS_PATH(json, 'one', '$.connection.config.httpPath') = 0
OR JSON_UNQUOTE(json ->> '$.connection.config.httpPath') IS NULL
OR json ->> '$.connection.config.httpPath' = 'null'
);
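
A quick way to confirm the backfill landed is to count the Databricks rows that still match the migration's WHERE clause. The sketch below is a hypothetical spot check in Python with SQLAlchemy, not part of this change; the table name and JSON paths come from the migration above, while the DSN is a placeholder.

from sqlalchemy import create_engine, text

# Placeholder DSN; point it at the actual OpenMetadata MySQL database.
engine = create_engine("mysql+pymysql://user:pass@localhost:3306/openmetadata_db")

# Re-runs the migration's WHERE clause as a SELECT: any row counted here
# was missed by the backfill.
check = text(
    """
    SELECT COUNT(*)
    FROM dbservice_entity
    WHERE serviceType = 'Databricks'
      AND (
        JSON_CONTAINS_PATH(json, 'one', '$.connection.config.httpPath') = 0
        OR json ->> '$.connection.config.httpPath' IS NULL
        OR json ->> '$.connection.config.httpPath' = 'null'
      )
    """
)
with engine.connect() as conn:
    remaining = conn.execute(check).scalar()
assert remaining == 0, f"{remaining} Databricks services still lack httpPath"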

View File

@@ -1,3 +1,23 @@
UPDATE test_case
SET json = json || jsonb_build_object('createdBy', json->>'updatedBy')
WHERE json->>'createdBy' IS NULL;
-- Made httpPath a required field for Databricks, updating records
-- where httpPath is NULL or missing to an empty string.
UPDATE
dbservice_entity
SET
json = jsonb_set(
json,
'{connection,config,httpPath}',
'""' :: jsonb,
true
)
WHERE
serviceType = 'Databricks'
AND (
NOT jsonb_path_exists(json, '$.connection.config.httpPath')
OR (json -> 'connection' -> 'config' ->> 'httpPath') IS NULL
OR (json -> 'connection' -> 'config' ->> 'httpPath') = 'null'
);
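
For readers less used to jsonb_set, the final true argument is create_missing: it inserts the key when the path is absent, rather than only replacing an existing value. Below is the same backfill mirrored in plain Python purely as an illustration (no such helper exists in the repo); like the migration, it assumes connection.config is already present on every service document.

import copy

def backfill_http_path(service_json: dict) -> dict:
    # Mirror the UPDATE above: where httpPath is absent, NULL, or the
    # string 'null', set it to an empty string; other rows are untouched.
    doc = copy.deepcopy(service_json)
    config = doc["connection"]["config"]  # assumed present, as in the migration
    if "httpPath" not in config or config["httpPath"] in (None, "null"):
        config["httpPath"] = ""
    return doc

before = {"connection": {"config": {"token": "abc", "hostPort": "localhost:443"}}}
assert backfill_http_path(before)["connection"]["config"]["httpPath"] == ""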

View File

@@ -87,6 +87,7 @@ mock_databricks_config = {
"config": {
"token": "random_token",
"hostPort": "localhost:443",
"httpPath": "sql/1.0/endpoints/path",
"connectionArguments": {
"http_path": "sql/1.0/endpoints/path",
},

View File

@@ -129,6 +129,7 @@ class SourceConnectionTest(TestCase):
scheme=DatabricksScheme.databricks_connector,
hostPort="1.1.1.1:443",
token="KlivDTACWXKmZVfN1qIM",
httpPath="/sql/1.0/warehouses/abcdedfg",
)
assert expected_result == get_connection_url(databricks_conn_obj)
@@ -144,6 +145,7 @@ class SourceConnectionTest(TestCase):
scheme=DatabricksScheme.databricks_connector,
hostPort="1.1.1.1:443",
token="KlivDTACWXKmZVfN1qIM",
httpPath="/sql/1.0/warehouses/abcdedfg",
)
assert expected_result == get_connection_url(databricks_conn_obj)

View File

@@ -237,6 +237,7 @@ def test_databricks():
"config": {
"token": "<databricks token>",
"hostPort": "localhost:443",
"httpPath": "<http path of databricks cluster>",
"connectionArguments": {
"http_path": "<http path of databricks cluster>"
},

View File

@@ -48,6 +48,7 @@ mock_databricks_config = {
"databaseSchema": "default",
"token": "123sawdtesttoken",
"hostPort": "localhost:443",
"httpPath": "/sql/1.0/warehouses/abcdedfg",
"connectionArguments": {"http_path": "/sql/1.0/warehouses/abcdedfg"},
}
},

View File

@@ -9,13 +9,17 @@
"databricksType": {
"description": "Service type.",
"type": "string",
"enum": ["Databricks"],
"enum": [
"Databricks"
],
"default": "Databricks"
},
"databricksScheme": {
"description": "SQLAlchemy driver scheme options.",
"type": "string",
"enum": ["databricks+connector"],
"enum": [
"databricks+connector"
],
"default": "databricks+connector"
}
},
@@ -84,7 +88,9 @@
"$ref": "../../../../type/filterPattern.json#/definitions/filterPattern",
"default": {
"includes": [],
"excludes": ["^information_schema$"]
"excludes": [
"^information_schema$"
]
}
},
"tableFilterPattern": {
@@ -98,7 +104,9 @@
"$ref": "../../../../type/filterPattern.json#/definitions/filterPattern",
"default": {
"includes": [],
"excludes": ["^system$"]
"excludes": [
"^system$"
]
}
},
"supportsUsageExtraction": {
@@ -132,5 +140,9 @@
}
},
"additionalProperties": false,
"required": ["hostPort", "token"]
"required": [
"hostPort",
"token",
"httpPath"
]
}
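
The practical effect of the extended required list: a config that validated before this change is now rejected when httpPath is missing. A minimal sketch with the jsonschema package, using a trimmed stand-in for the schema above ($refs and unrelated properties dropped):

from jsonschema import Draft7Validator

# Trimmed stand-in for databricksConnection.json; only the fields touched
# by the new "required" list are kept.
trimmed_schema = {
    "type": "object",
    "properties": {
        "hostPort": {"type": "string"},
        "token": {"type": "string"},
        "httpPath": {"type": "string"},
    },
    "required": ["hostPort", "token", "httpPath"],
}

config = {"hostPort": "localhost:443", "token": "random_token"}
errors = [e.message for e in Draft7Validator(trimmed_schema).iter_errors(config)]
print(errors)  # ["'httpPath' is a required property"]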

View File

@@ -45,7 +45,7 @@ export interface DatabricksConnection {
/**
* Databricks compute resources URL.
*/
httpPath?: string;
httpPath: string;
/**
* Table name to fetch the query history.
*/
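
Generated clients inherit the change: just as the TypeScript property above loses its ?, the generated Python model presumably drops Optional, so building a Databricks connection without httpPath fails at validation time. A hand-written pydantic sketch of that behavior (the class is a stand-in, not the repo's generated DatabricksConnection):

from pydantic import BaseModel, ValidationError

class DatabricksConnectionSketch(BaseModel):
    hostPort: str
    token: str
    httpPath: str  # required after this change; previously optional

try:
    DatabricksConnectionSketch(hostPort="localhost:443", token="random_token")
except ValidationError as exc:
    print(exc)  # reports httpPath as a missing required field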