* feat: databricks oauth and azure ad auth setup
* refactor: add auth type changes in databricks.md
* fix: test after oauth changes
* refactor: unity catalog connection to databricks connection code
This commit is contained in:
parent 1c710ef5e3
commit f1afe8f5f1
@@ -5,3 +5,18 @@
 UPDATE profiler_data_time_series
 SET json = JSON_SET(json, '$.profileData', json->'$.profileData.profileData')
 WHERE json->>'$.profileData.profileData' IS NOT NULL;
+
+-- Migration script to restructure Databricks connection configuration
+-- Move 'token' field from connection.config.token to connection.config.authType.token
+UPDATE dbservice_entity
+SET
+    json = JSON_SET (
+        JSON_REMOVE (json, '$.connection.config.token'),
+        '$.connection.config.authType',
+        JSON_OBJECT (
+            'token',
+            JSON_EXTRACT (json, '$.connection.config.token')
+        )
+    )
+WHERE
+    serviceType = 'Databricks';
@@ -5,3 +5,15 @@
 UPDATE profiler_data_time_series
 SET json = jsonb_set(json::jsonb, '{profileData}', json::jsonb->'profileData'->'profileData')::json
 WHERE json->'profileData'->>'profileData' IS NOT NULL;
+
+-- Migration script to restructure Databricks connection configuration
+-- Move 'token' field from connection.config.token to connection.config.authType.token
+
+UPDATE dbservice_entity
+SET json = jsonb_set(
+    json #- '{connection,config,token}',
+    '{connection,config,authType}',
+    jsonb_build_object('token', json #> '{connection,config,token}'),
+    true
+)
+WHERE serviceType = 'Databricks';
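Taken together, the MySQL and PostgreSQL statements above perform the same restructuring of the stored service connection JSON. A minimal Python sketch of that reshaping, using the field names from the diff (the surrounding JSON structure is simplified and the helper name is purely illustrative):

```python
# Sketch of the JSON reshaping done by the two migration statements above.
# The real migrations run inside MySQL/PostgreSQL; this only illustrates the
# before/after shape of the stored `json` column for a Databricks service.

def move_token_to_auth_type(service_json: dict) -> dict:
    """Move connection.config.token into connection.config.authType.token."""
    config = service_json["connection"]["config"]
    token = config.pop("token", None)
    if token is not None:
        config["authType"] = {"token": token}
    return service_json


before = {
    "serviceType": "Databricks",
    "connection": {"config": {"hostPort": "localhost:443", "token": "dapi..."}},
}
after = move_token_to_auth_type(before)
assert after["connection"]["config"]["authType"] == {"token": "dapi..."}
assert "token" not in after["connection"]["config"]
```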
@@ -5,7 +5,15 @@ source:
       config:
         catalog: hive_metastore
         databaseSchema: default
+
+        authType:
           token: <databricks token>
+          # clientId: databricks service principal client id
+          # clientSecret: databricks service principal client secret
+          # azureClientSecret: azure client secret
+          # azureClientId: azure client id
+          # azureTenantId: azure tenant id
+
         hostPort: localhost:443
         connectionTimeout: 120
         connectionArguments:
@@ -12,9 +12,11 @@
 """
 Source connection handler
 """
+from copy import deepcopy
 from functools import partial
 from typing import Optional

+from databricks.sdk.core import Config, azure_service_principal, oauth_service_principal
 from sqlalchemy.engine import Engine
 from sqlalchemy.exc import DatabaseError
 from sqlalchemy.inspection import inspect
@@ -22,6 +24,15 @@ from sqlalchemy.inspection import inspect
 from metadata.generated.schema.entity.automations.workflow import (
     Workflow as AutomationWorkflow,
 )
+from metadata.generated.schema.entity.services.connections.database.databricks.azureAdSetup import (
+    AzureAdSetup,
+)
+from metadata.generated.schema.entity.services.connections.database.databricks.databricksOAuth import (
+    DatabricksOauth,
+)
+from metadata.generated.schema.entity.services.connections.database.databricks.personalAccessToken import (
+    PersonalAccessToken,
+)
 from metadata.generated.schema.entity.services.connections.database.databricksConnection import (
     DatabricksConnection,
 )
@@ -122,8 +133,49 @@ class DatabricksEngineWrapper:


 def get_connection_url(connection: DatabricksConnection) -> str:
-    url = f"{connection.scheme.value}://token:{connection.token.get_secret_value()}@{connection.hostPort}"
-    return url
+    return f"{connection.scheme.value}://{connection.hostPort}"
+
+
+def get_personal_access_token_auth(connection: DatabricksConnection) -> dict:
+    """
+    Configure Personal Access Token authentication
+    """
+    return {"access_token": connection.authType.token.get_secret_value()}
+
+
+def get_databricks_oauth_auth(connection: DatabricksConnection):
+    """
+    Create Databricks OAuth2 M2M credentials provider for Service Principal authentication
+    """
+
+    def credential_provider():
+        hostname = connection.hostPort.split(":")[0]
+        config = Config(
+            host=f"https://{hostname}",
+            client_id=connection.authType.clientId,
+            client_secret=connection.authType.clientSecret.get_secret_value(),
+        )
+        return oauth_service_principal(config)
+
+    return {"credentials_provider": credential_provider}
+
+
+def get_azure_ad_auth(connection: DatabricksConnection):
+    """
+    Create Azure AD credentials provider for Azure Service Principal authentication
+    """
+
+    def credential_provider():
+        hostname = connection.hostPort.split(":")[0]
+        config = Config(
+            host=f"https://{hostname}",
+            azure_client_secret=connection.authType.azureClientSecret.get_secret_value(),
+            azure_client_id=connection.authType.azureClientId,
+            azure_tenant_id=connection.authType.azureTenantId,
+        )
+        return azure_service_principal(config)
+
+    return {"credentials_provider": credential_provider}
+
+
 def get_connection(connection: DatabricksConnection) -> Engine:
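Both OAuth helpers return a `credentials_provider` entry rather than a static token: the connector expects a zero-argument callable, so credential acquisition is deferred until a session is actually opened. A self-contained sketch of that factory pattern, using stand-ins (`FakeConfig`, `fake_oauth_service_principal`, `build_oauth_connection_args` are illustrative names, not part of the Databricks SDK or this codebase):

```python
# Self-contained sketch of the "credentials_provider" pattern used above.
# Stand-ins replace databricks.sdk.core.Config / oauth_service_principal so the
# example runs without the Databricks SDK installed.
from dataclasses import dataclass


@dataclass
class FakeConfig:
    host: str
    client_id: str
    client_secret: str


def fake_oauth_service_principal(config: FakeConfig):
    # The real SDK returns a provider that fetches and refreshes OAuth tokens;
    # here we fabricate a header so the control flow stays visible.
    def header_factory():
        return {"Authorization": f"Bearer token-for-{config.client_id}"}

    return header_factory


def build_oauth_connection_args(host_port: str, client_id: str, client_secret: str) -> dict:
    hostname = host_port.split(":")[0]
    config = FakeConfig(host=f"https://{hostname}", client_id=client_id, client_secret=client_secret)

    def credential_provider():
        # Invoked lazily by the connector, so tokens are acquired only when a
        # session is opened and can be refreshed on expiry.
        return fake_oauth_service_principal(config)

    return {"credentials_provider": credential_provider}


auth_args = build_oauth_connection_args("adb-123.azuredatabricks.net:443", "client-id", "secret")
print(callable(auth_args["credentials_provider"]))  # True
```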
@@ -131,17 +183,39 @@ def get_connection(connection: DatabricksConnection) -> Engine:
     Create connection
     """

-    if connection.httpPath:
     if not connection.connectionArguments:
         connection.connectionArguments = init_empty_connection_arguments()
+
+    # Add httpPath to connection arguments
+    if connection.httpPath:
         connection.connectionArguments.root["http_path"] = connection.httpPath

-    return create_generic_db_connection(
+    auth_method = {
+        PersonalAccessToken: get_personal_access_token_auth,
+        DatabricksOauth: get_databricks_oauth_auth,
+        AzureAdSetup: get_azure_ad_auth,
+    }.get(type(connection.authType))
+
+    if not auth_method:
+        raise ValueError(
+            f"Unsupported authentication type: {type(connection.authType)}"
+        )
+
+    auth_args = auth_method(connection)
+
+    original_connection_arguments = connection.connectionArguments
+    connection.connectionArguments = deepcopy(original_connection_arguments)
+    connection.connectionArguments.root.update(auth_args)
+
+    engine = create_generic_db_connection(
         connection=connection,
         get_connection_url_fn=get_connection_url,
         get_connection_args_fn=get_connection_args_common,
     )
+
+    connection.connectionArguments = original_connection_arguments
+    return engine


 def test_connection(
     metadata: OpenMetadata,
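The rewritten `get_connection` picks the auth helper by the concrete type of `connection.authType`, mutates only a deep copy of the connection arguments, and restores the original afterwards so credentials are not left on the shared connection object. A self-contained sketch of that dispatch-and-restore pattern (the dataclasses below are stand-ins, not the generated OpenMetadata models):

```python
from copy import deepcopy
from dataclasses import dataclass, field


@dataclass
class PersonalAccessToken:  # stand-in for the generated model
    token: str


@dataclass
class DatabricksOauth:  # stand-in for the generated model
    clientId: str
    clientSecret: str


@dataclass
class FakeConnection:  # stand-in for DatabricksConnection
    authType: object
    connectionArguments: dict = field(default_factory=dict)


def build_engine_args(connection: FakeConnection) -> dict:
    # Dispatch on the concrete authType class, as get_connection does above.
    auth_method = {
        PersonalAccessToken: lambda c: {"access_token": c.authType.token},
        DatabricksOauth: lambda c: {"credentials_provider": lambda: c.authType},
    }.get(type(connection.authType))
    if not auth_method:
        raise ValueError(f"Unsupported authentication type: {type(connection.authType)}")

    original = connection.connectionArguments
    connection.connectionArguments = deepcopy(original)  # never mutate the shared args
    connection.connectionArguments.update(auth_method(connection))
    engine_args = connection.connectionArguments

    connection.connectionArguments = original  # restore; secrets stay out of the object
    return engine_args


conn = FakeConnection(authType=PersonalAccessToken(token="dapi-example"))
print(build_engine_args(conn))   # {'access_token': 'dapi-example'}
print(conn.connectionArguments)  # {} - original arguments untouched
```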
@@ -46,6 +46,11 @@ def _(*_, **__):
     return "SELECT SESSION_USER()"


+@compiles(ConnTestFn, Dialects.Databricks)
+def _(*_, **__):
+    return "SELECT '42'"
+
+
 @compiles(ConnTestFn, Dialects.Db2)
 @compiles(ConnTestFn, Dialects.IbmDbSa)
 @compiles(ConnTestFn, Dialects.Ibmi)
@@ -15,6 +15,9 @@ supporting sqlalchemy abstraction layer
 """


+from metadata.generated.schema.entity.services.connections.database.databricksConnection import (
+    DatabricksConnection,
+)
 from metadata.sampler.sqlalchemy.databricks.sampler import DatabricksSamplerInterface


@@ -24,4 +27,22 @@ class UnityCatalogSamplerInterface(DatabricksSamplerInterface):
     """

     def __init__(self, *args, **kwargs):
+        # Convert Unity Catalog connection to Databricks and move token to authType.
+        kwargs["service_connection_config"] = DatabricksConnection.model_validate(
+            {
+                **(
+                    (
+                        t := (
+                            cfg := kwargs["service_connection_config"].model_dump(
+                                mode="json"
+                            )
+                        ).pop("token")
+                    )
+                    and cfg
+                ),
+                "type": "Databricks",
+                "authType": {"token": t},
+            }
+        )
+
         super().__init__(*args, **kwargs)
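The nested walrus expression above is compact but hard to read: it dumps the Unity Catalog connection to a dict, pops `token` out of it, and rebuilds a Databricks connection payload with the token nested under `authType` (the result is then validated into `DatabricksConnection`). A spelled-out equivalent of the same reshaping, using a plain dict so the sketch stands alone:

```python
def to_databricks_payload(unity_catalog_config: dict) -> dict:
    """Readable equivalent of the nested walrus expression above."""
    cfg = dict(unity_catalog_config)   # model_dump(mode="json") in the original
    token = cfg.pop("token")           # remove the flat token field
    return {
        **cfg,                         # keep every other connection field
        "type": "Databricks",          # retarget the service type
        "authType": {"token": token},  # nest the token under authType
    }


payload = to_databricks_payload(
    {"type": "UnityCatalog", "hostPort": "localhost:443", "token": "dapi-example"}
)
assert payload["authType"] == {"token": "dapi-example"}
assert "token" not in payload
```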
@@ -85,7 +85,7 @@ mock_databricks_config = {
         "serviceName": "local_databricks1",
         "serviceConnection": {
             "config": {
-                "token": "random_token",
+                "authType": {"token": "random_token"},
                 "hostPort": "localhost:443",
                 "httpPath": "sql/1.0/endpoints/path",
                 "connectionArguments": {
@@ -30,6 +30,12 @@ from metadata.generated.schema.entity.services.connections.database.common.basic
 from metadata.generated.schema.entity.services.connections.database.common.jwtAuth import (
     JwtAuth,
 )
+from metadata.generated.schema.entity.services.connections.database.databricks.databricksOAuth import (
+    DatabricksOauth,
+)
+from metadata.generated.schema.entity.services.connections.database.databricks.personalAccessToken import (
+    PersonalAccessToken,
+)
 from metadata.generated.schema.entity.services.connections.database.databricksConnection import (
     DatabricksConnection,
     DatabricksScheme,
@@ -130,13 +136,11 @@ class SourceConnectionTest(TestCase):
             get_connection_url,
         )

-        expected_result = (
-            "databricks+connector://token:KlivDTACWXKmZVfN1qIM@1.1.1.1:443"
-        )
+        expected_result = "databricks+connector://1.1.1.1:443"
         databricks_conn_obj = DatabricksConnection(
             scheme=DatabricksScheme.databricks_connector,
             hostPort="1.1.1.1:443",
-            token="KlivDTACWXKmZVfN1qIM",
+            authType=PersonalAccessToken(token="KlivDTACWXKmZVfN1qIM"),
             httpPath="/sql/1.0/warehouses/abcdedfg",
         )
         assert expected_result == get_connection_url(databricks_conn_obj)
@@ -146,14 +150,16 @@ class SourceConnectionTest(TestCase):
             get_connection_url,
         )

-        expected_result = (
-            "databricks+connector://token:KlivDTACWXKmZVfN1qIM@1.1.1.1:443"
-        )
+        expected_result = "databricks+connector://1.1.1.1:443"
         databricks_conn_obj = DatabricksConnection(
             scheme=DatabricksScheme.databricks_connector,
             hostPort="1.1.1.1:443",
-            token="KlivDTACWXKmZVfN1qIM",
+            authType=DatabricksOauth(
+                clientId="d40e2905-88ef-42ab-8898-fbefff2d071d",
+                clientSecret="secret-value",
+            ),
             httpPath="/sql/1.0/warehouses/abcdedfg",
+            catalog="main",
         )
         assert expected_result == get_connection_url(databricks_conn_obj)

@@ -235,7 +235,11 @@ def test_databricks():
         "serviceName": "local_databricks",
         "serviceConnection": {
             "config": {
-                "token": "<databricks token>",
+                "authType": {
+                    "azureClientId": "3df43ed7-5f2f-46bb-9793-384c6374a81d",
+                    "azureClientSecret": "secret-value",
+                    "azureTenantId": "3df43ed7-5g1f-46bb-9793-384c6374a81d",
+                },
                 "hostPort": "localhost:443",
                 "httpPath": "<http path of databricks cluster>",
                 "connectionArguments": {
@@ -23,6 +23,9 @@ from metadata.generated.schema.api.data.createTable import CreateTableRequest
 from metadata.generated.schema.entity.data.database import Database
 from metadata.generated.schema.entity.data.databaseSchema import DatabaseSchema
 from metadata.generated.schema.entity.data.table import Column, DataType, TableType
+from metadata.generated.schema.entity.services.connections.database.databricks.personalAccessToken import (
+    PersonalAccessToken,
+)
 from metadata.generated.schema.entity.services.databaseService import (
     DatabaseConnection,
     DatabaseService,
@@ -45,7 +48,9 @@ mock_databricks_config = {
             "type": "Databricks",
             "catalog": "hive_metastore",
             "databaseSchema": "default",
+            "authType": {
                 "token": "123sawdtesttoken",
+            },
             "hostPort": "localhost:443",
             "httpPath": "/sql/1.0/warehouses/abcdedfg",
             "connectionArguments": {"http_path": "/sql/1.0/warehouses/abcdedfg"},
@@ -397,12 +402,12 @@ class DatabricksConnectionTest(TestCase):
         connection = self.DatabricksConnection(
             scheme=self.DatabricksScheme.databricks_connector,
             hostPort="test-host:443",
-            token="test-token",
+            authType=PersonalAccessToken(token="test-token"),
             httpPath="/sql/1.0/warehouses/test",
         )

         url = self.get_connection_url(connection)
-        expected_url = "databricks+connector://token:test-token@test-host:443"
+        expected_url = "databricks+connector://test-host:443"
         self.assertEqual(url, expected_url)

     @patch(
@@ -413,7 +418,7 @@ class DatabricksConnectionTest(TestCase):
         connection = self.DatabricksConnection(
             scheme=self.DatabricksScheme.databricks_connector,
             hostPort="test-host:443",
-            token="test-token",
+            authType=PersonalAccessToken(token="test-token"),
             httpPath="/sql/1.0/warehouses/test",
         )

@@ -764,7 +769,7 @@ class DatabricksConnectionTest(TestCase):
         service_connection = DatabricksConnection(
             scheme=DatabricksScheme.databricks_connector,
             hostPort="test-host:443",
-            token="test-token",
+            authType=PersonalAccessToken(token="test-token"),
             httpPath="/sql/1.0/warehouses/test",
             queryHistoryTable="test_table",
         )
@@ -30,6 +30,7 @@ import org.openmetadata.schema.services.connections.database.BigQueryConnection;
 import org.openmetadata.schema.services.connections.database.BigTableConnection;
 import org.openmetadata.schema.services.connections.database.CassandraConnection;
 import org.openmetadata.schema.services.connections.database.CockroachConnection;
+import org.openmetadata.schema.services.connections.database.DatabricksConnection;
 import org.openmetadata.schema.services.connections.database.DatalakeConnection;
 import org.openmetadata.schema.services.connections.database.DeltaLakeConnection;
 import org.openmetadata.schema.services.connections.database.GreenplumConnection;
@@ -104,6 +105,7 @@ public final class ClassConverterFactory {
         Map.entry(MatillionConnection.class, new MatillionConnectionClassConverter()),
         Map.entry(VertexAIConnection.class, new VertexAIConnectionClassConverter()),
         Map.entry(RangerConnection.class, new RangerConnectionClassConverter()),
+        Map.entry(DatabricksConnection.class, new DatabricksConnectionClassConverter()),
         Map.entry(CassandraConnection.class, new CassandraConnectionClassConverter()),
         Map.entry(SSISConnection.class, new SsisConnectionClassConverter()),
         Map.entry(WherescapeConnection.class, new WherescapeConnectionClassConverter()));
@@ -0,0 +1,43 @@
+/*
+ *  Copyright 2021 Collate
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.openmetadata.service.secrets.converter;
+
+import java.util.List;
+import org.openmetadata.schema.services.connections.database.DatabricksConnection;
+import org.openmetadata.schema.services.connections.database.databricks.AzureADSetup;
+import org.openmetadata.schema.services.connections.database.databricks.DatabricksOAuth;
+import org.openmetadata.schema.services.connections.database.databricks.PersonalAccessToken;
+import org.openmetadata.schema.utils.JsonUtils;
+
+/** Converter class to get a `DatabricksConnection` object. */
+public class DatabricksConnectionClassConverter extends ClassConverter {
+
+  private static final List<Class<?>> CONFIG_SOURCE_CLASSES =
+      List.of(PersonalAccessToken.class, DatabricksOAuth.class, AzureADSetup.class);
+
+  public DatabricksConnectionClassConverter() {
+    super(DatabricksConnection.class);
+  }
+
+  @Override
+  public Object convert(Object object) {
+    DatabricksConnection databricksConnection =
+        (DatabricksConnection) JsonUtils.convertValue(object, this.clazz);
+
+    tryToConvert(databricksConnection.getAuthType(), CONFIG_SOURCE_CLASSES)
+        .ifPresent(databricksConnection::setAuthType);
+
+    return databricksConnection;
+  }
+}
@@ -0,0 +1,32 @@
+{
+  "$id": "https://open-metadata.org/schema/entity/services/connections/database/databricks/azureAdSetup.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Azure AD Setup",
+  "description": "Azure Active Directory authentication for Azure Databricks workspaces using Service Principal.",
+  "javaType": "org.openmetadata.schema.services.connections.database.databricks.AzureADSetup",
+  "type": "object",
+  "properties": {
+    "azureClientId": {
+      "title": "Azure Client ID",
+      "description": "Azure Service Principal Application (client) ID registered in your Azure Active Directory.",
+      "type": "string"
+    },
+    "azureClientSecret": {
+      "title": "Azure Client Secret",
+      "description": "Azure Service Principal client secret created in Azure AD for authentication.",
+      "type": "string",
+      "format": "password"
+    },
+    "azureTenantId": {
+      "title": "Azure Tenant ID",
+      "description": "Azure Active Directory Tenant ID where your Service Principal is registered.",
+      "type": "string"
+    }
+  },
+  "additionalProperties": false,
+  "required": [
+    "azureClientId",
+    "azureClientSecret",
+    "azureTenantId"
+  ]
+}
@@ -0,0 +1,26 @@
+{
+  "$id": "https://open-metadata.org/schema/entity/services/connections/database/databricks/databricksOAuth.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Databricks OAuth",
+  "description": "OAuth2 Machine-to-Machine authentication using Service Principal credentials for Databricks.",
+  "javaType": "org.openmetadata.schema.services.connections.database.databricks.DatabricksOAuth",
+  "type": "object",
+  "properties": {
+    "clientId": {
+      "title": "Client ID",
+      "description": "Service Principal Application ID created in your Databricks Account Console for OAuth Machine-to-Machine authentication.",
+      "type": "string"
+    },
+    "clientSecret": {
+      "title": "Client Secret",
+      "description": "OAuth Secret generated for the Service Principal in Databricks Account Console. Used for secure OAuth2 authentication.",
+      "type": "string",
+      "format": "password"
+    }
+  },
+  "additionalProperties": false,
+  "required": [
+    "clientId",
+    "clientSecret"
+  ]
+}
@@ -0,0 +1,20 @@
+{
+  "$id": "https://open-metadata.org/schema/entity/services/connections/database/databricks/personalAccessToken.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Personal Access Token",
+  "description": "Personal Access Token authentication for Databricks.",
+  "javaType": "org.openmetadata.schema.services.connections.database.databricks.PersonalAccessToken",
+  "type": "object",
+  "properties": {
+    "token": {
+      "title": "Token",
+      "description": "Generated Personal Access Token for Databricks workspace authentication. This token is created from User Settings -> Developer -> Access Tokens in your Databricks workspace.",
+      "type": "string",
+      "format": "password"
+    }
+  },
+  "additionalProperties": false,
+  "required": [
+    "token"
+  ]
+}
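Each of the three new schema files is a small draft-07 object schema. A quick way to sanity-check a payload against one of them, shown with an inline, trimmed copy of the personalAccessToken schema (this assumes the third-party `jsonschema` package is available; it is not part of this change):

```python
import jsonschema

# Trimmed copy of personalAccessToken.json from the diff above.
personal_access_token_schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "type": "object",
    "properties": {"token": {"type": "string", "format": "password"}},
    "additionalProperties": False,
    "required": ["token"],
}

# A valid payload passes silently.
jsonschema.validate(instance={"token": "dapi-example"}, schema=personal_access_token_schema)

try:
    # Missing the required token -> raises ValidationError.
    jsonschema.validate(instance={}, schema=personal_access_token_schema)
except jsonschema.ValidationError as err:
    print(f"rejected as expected: {err.message}")
```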
@@ -41,11 +41,23 @@
       "description": "Host and port of the Databricks service.",
       "type": "string"
     },
-    "token": {
-      "title": "Token",
-      "description": "Generated Token to connect to Databricks.",
-      "type": "string",
-      "format": "password"
+    "authType": {
+      "title": "Authentication Type",
+      "description": "Choose between different authentication types for Databricks.",
+      "oneOf": [
+        {
+          "title": "Personal Access Token",
+          "$ref": "./databricks/personalAccessToken.json"
+        },
+        {
+          "title": "Databricks OAuth",
+          "$ref": "./databricks/databricksOAuth.json"
+        },
+        {
+          "title": "Azure AD Setup",
+          "$ref": "./databricks/azureAdSetup.json"
+        }
+      ]
     },
     "httpPath": {
       "title": "Http Path",
@@ -142,7 +154,7 @@
   "additionalProperties": false,
   "required": [
     "hostPort",
-    "token",
-    "httpPath"
+    "httpPath",
+    "authType"
   ]
 }
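With the `oneOf` in place, exactly one of the three referenced shapes must appear under `authType`. For illustration, the three payload shapes the schema is meant to accept, written as Python dicts (all values here are placeholders):

```python
# Example authType payloads matching the three referenced sub-schemas.
personal_access_token = {"token": "dapi-example-token"}

databricks_oauth = {
    "clientId": "service-principal-application-id",
    "clientSecret": "service-principal-oauth-secret",
}

azure_ad_setup = {
    "azureClientId": "azure-app-client-id",
    "azureClientSecret": "azure-client-secret",
    "azureTenantId": "azure-tenant-id",
}

# A Databricks connection config would then nest one of them:
connection_config = {
    "type": "Databricks",
    "hostPort": "example-workspace.cloud.databricks.com:443",
    "httpPath": "/sql/1.0/warehouses/abcdedfg",
    "authType": databricks_oauth,  # or personal_access_token / azure_ad_setup
}
```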
@@ -32,9 +32,49 @@ This parameter specifies the host and port of the Databricks instance. This shou
 If you are running the OpenMetadata ingestion in a docker and your services are hosted on the `localhost`, then use `host.docker.internal:3000` as the value.
 $$

+$$section
+### Authentication Type $(id="authType")
+Select the authentication method to connect to your Databricks workspace.
+
+- **Personal Access Token**: Generated Personal Access Token for Databricks workspace authentication.
+
+- **Databricks OAuth**: OAuth2 Machine-to-Machine authentication using a Service Principal.
+
+- **Azure AD Setup**: Specifically for Azure Databricks workspaces that use Azure Active Directory for identity management. Uses Azure Service Principal authentication through Azure AD.
+$$
+
 $$section
 ### Token $(id="token")
-Generated Token to connect to Databricks. E.g., `dapw488e89a7176f7eb39bbc718617891564`.
+Personal Access Token (PAT) for authenticating with Databricks workspace.
+(e.g., `dapi1234567890abcdef`)
+$$
+
+$$section
+### Client ID $(id="clientId")
+The Application ID of your Databricks Service Principal for OAuth2 authentication.
+(e.g., `12345678-1234-1234-1234-123456789abc`)
+$$
+
+$$section
+### Client Secret $(id="clientSecret")
+OAuth secret for the Databricks Service Principal.
+$$
+
+$$section
+### Azure Client ID $(id="azureClientId")
+Azure Active Directory Application (client) ID for Azure Databricks authentication.
+(e.g., `a1b2c3d4-e5f6-7890-abcd-ef1234567890`)
+$$
+
+$$section
+### Azure Client Secret $(id="azureClientSecret")
+Secret key for the Azure AD Application.
+$$
+
+$$section
+### Azure Tenant ID $(id="azureTenantId")
+Your Azure Active Directory tenant identifier.
+(e.g., `98765432-dcba-4321-abcd-1234567890ab`)
 $$

 $$section
@ -930,6 +930,18 @@ export interface ConfigObject {
|
|||||||
* Establish secure connection with clickhouse
|
* Establish secure connection with clickhouse
|
||||||
*/
|
*/
|
||||||
secure?: boolean;
|
secure?: boolean;
|
||||||
|
/**
|
||||||
|
* Choose between different authentication types for Databricks.
|
||||||
|
*
|
||||||
|
* Choose Auth Config Type.
|
||||||
|
*
|
||||||
|
* Types of methods used to authenticate to the tableau instance
|
||||||
|
*
|
||||||
|
* Types of methods used to authenticate to the alation instance
|
||||||
|
*
|
||||||
|
* Authentication type to connect to Apache Ranger.
|
||||||
|
*/
|
||||||
|
authType?: AuthenticationType | NoConfigAuthenticationTypes;
|
||||||
/**
|
/**
|
||||||
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
|
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
|
||||||
* would like to restrict the metadata reading to a single catalog. When left blank,
|
* would like to restrict the metadata reading to a single catalog. When left blank,
|
||||||
@ -1012,16 +1024,6 @@ export interface ConfigObject {
|
|||||||
* Authentication mode to connect to Impala.
|
* Authentication mode to connect to Impala.
|
||||||
*/
|
*/
|
||||||
authMechanism?: AuthMechanismEnum;
|
authMechanism?: AuthMechanismEnum;
|
||||||
/**
|
|
||||||
* Choose Auth Config Type.
|
|
||||||
*
|
|
||||||
* Types of methods used to authenticate to the tableau instance
|
|
||||||
*
|
|
||||||
* Types of methods used to authenticate to the alation instance
|
|
||||||
*
|
|
||||||
* Authentication type to connect to Apache Ranger.
|
|
||||||
*/
|
|
||||||
authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
|
|
||||||
/**
|
/**
|
||||||
* Use slow logs to extract lineage.
|
* Use slow logs to extract lineage.
|
||||||
*/
|
*/
|
||||||
@ -2001,6 +2003,16 @@ export enum AuthProvider {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
* Choose between different authentication types for Databricks.
|
||||||
|
*
|
||||||
|
* Personal Access Token authentication for Databricks.
|
||||||
|
*
|
||||||
|
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
|
||||||
|
* Databricks.
|
||||||
|
*
|
||||||
|
* Azure Active Directory authentication for Azure Databricks workspaces using Service
|
||||||
|
* Principal.
|
||||||
|
*
|
||||||
* Choose Auth Config Type.
|
* Choose Auth Config Type.
|
||||||
*
|
*
|
||||||
* Common Database Connection Config
|
* Common Database Connection Config
|
||||||
@ -2033,7 +2045,34 @@ export enum AuthProvider {
|
|||||||
*
|
*
|
||||||
* Configuration for connecting to Ranger Basic Auth.
|
* Configuration for connecting to Ranger Basic Auth.
|
||||||
*/
|
*/
|
||||||
export interface AuthConfigurationType {
|
export interface AuthenticationType {
|
||||||
|
/**
|
||||||
|
* Generated Personal Access Token for Databricks workspace authentication. This token is
|
||||||
|
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
|
||||||
|
*/
|
||||||
|
token?: string;
|
||||||
|
/**
|
||||||
|
* Service Principal Application ID created in your Databricks Account Console for OAuth
|
||||||
|
* Machine-to-Machine authentication.
|
||||||
|
*/
|
||||||
|
clientId?: string;
|
||||||
|
/**
|
||||||
|
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
|
||||||
|
* secure OAuth2 authentication.
|
||||||
|
*/
|
||||||
|
clientSecret?: string;
|
||||||
|
/**
|
||||||
|
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
|
||||||
|
*/
|
||||||
|
azureClientId?: string;
|
||||||
|
/**
|
||||||
|
* Azure Service Principal client secret created in Azure AD for authentication.
|
||||||
|
*/
|
||||||
|
azureClientSecret?: string;
|
||||||
|
/**
|
||||||
|
* Azure Active Directory Tenant ID where your Service Principal is registered.
|
||||||
|
*/
|
||||||
|
azureTenantId?: string;
|
||||||
/**
|
/**
|
||||||
* Password to connect to source.
|
* Password to connect to source.
|
||||||
*
|
*
|
||||||
@ -2969,7 +3008,7 @@ export interface ConfigConnection {
|
|||||||
/**
|
/**
|
||||||
* Choose Auth Config Type.
|
* Choose Auth Config Type.
|
||||||
*/
|
*/
|
||||||
authType?: ConnectionAuthConfigurationType;
|
authType?: AuthConfigurationType;
|
||||||
/**
|
/**
|
||||||
* Custom OpenMetadata Classification name for Postgres policy tags.
|
* Custom OpenMetadata Classification name for Postgres policy tags.
|
||||||
*/
|
*/
|
||||||
@ -3040,7 +3079,7 @@ export interface ConfigConnection {
|
|||||||
*
|
*
|
||||||
* Azure Database Connection Config
|
* Azure Database Connection Config
|
||||||
*/
|
*/
|
||||||
export interface ConnectionAuthConfigurationType {
|
export interface AuthConfigurationType {
|
||||||
/**
|
/**
|
||||||
* Password to connect to source.
|
* Password to connect to source.
|
||||||
*/
|
*/
|
||||||
@ -3457,7 +3496,7 @@ export interface HiveMetastoreConnectionDetails {
|
|||||||
/**
|
/**
|
||||||
* Choose Auth Config Type.
|
* Choose Auth Config Type.
|
||||||
*/
|
*/
|
||||||
authType?: ConnectionAuthConfigurationType;
|
authType?: AuthConfigurationType;
|
||||||
/**
|
/**
|
||||||
* Custom OpenMetadata Classification name for Postgres policy tags.
|
* Custom OpenMetadata Classification name for Postgres policy tags.
|
||||||
*/
|
*/
|
||||||
|
@ -542,6 +542,12 @@ export interface ConfigObject {
|
|||||||
* Establish secure connection with clickhouse
|
* Establish secure connection with clickhouse
|
||||||
*/
|
*/
|
||||||
secure?: boolean;
|
secure?: boolean;
|
||||||
|
/**
|
||||||
|
* Choose between different authentication types for Databricks.
|
||||||
|
*
|
||||||
|
* Choose Auth Config Type.
|
||||||
|
*/
|
||||||
|
authType?: AuthenticationType | NoConfigAuthenticationTypes;
|
||||||
/**
|
/**
|
||||||
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
|
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
|
||||||
* would like to restrict the metadata reading to a single catalog. When left blank,
|
* would like to restrict the metadata reading to a single catalog. When left blank,
|
||||||
@ -566,10 +572,6 @@ export interface ConfigObject {
|
|||||||
* Table name to fetch the query history.
|
* Table name to fetch the query history.
|
||||||
*/
|
*/
|
||||||
queryHistoryTable?: string;
|
queryHistoryTable?: string;
|
||||||
/**
|
|
||||||
* Generated Token to connect to Databricks.
|
|
||||||
*/
|
|
||||||
token?: string;
|
|
||||||
/**
|
/**
|
||||||
* CLI Driver version to connect to DB2. If not provided, the latest version will be used.
|
* CLI Driver version to connect to DB2. If not provided, the latest version will be used.
|
||||||
*/
|
*/
|
||||||
@ -622,10 +624,6 @@ export interface ConfigObject {
|
|||||||
* Authentication mode to connect to Impala.
|
* Authentication mode to connect to Impala.
|
||||||
*/
|
*/
|
||||||
authMechanism?: AuthMechanismEnum;
|
authMechanism?: AuthMechanismEnum;
|
||||||
/**
|
|
||||||
* Choose Auth Config Type.
|
|
||||||
*/
|
|
||||||
authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
|
|
||||||
/**
|
/**
|
||||||
* Use slow logs to extract lineage.
|
* Use slow logs to extract lineage.
|
||||||
*/
|
*/
|
||||||
@ -789,6 +787,10 @@ export interface ConfigObject {
|
|||||||
* Hostname of the Couchbase service.
|
* Hostname of the Couchbase service.
|
||||||
*/
|
*/
|
||||||
hostport?: string;
|
hostport?: string;
|
||||||
|
/**
|
||||||
|
* Generated Token to connect to Databricks.
|
||||||
|
*/
|
||||||
|
token?: string;
|
||||||
/**
|
/**
|
||||||
* Enable dataflow for ingestion
|
* Enable dataflow for ingestion
|
||||||
*/
|
*/
|
||||||
@ -909,6 +911,16 @@ export enum AuthMechanismEnum {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
* Choose between different authentication types for Databricks.
|
||||||
|
*
|
||||||
|
* Personal Access Token authentication for Databricks.
|
||||||
|
*
|
||||||
|
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
|
||||||
|
* Databricks.
|
||||||
|
*
|
||||||
|
* Azure Active Directory authentication for Azure Databricks workspaces using Service
|
||||||
|
* Principal.
|
||||||
|
*
|
||||||
* Choose Auth Config Type.
|
* Choose Auth Config Type.
|
||||||
*
|
*
|
||||||
* Common Database Connection Config
|
* Common Database Connection Config
|
||||||
@ -919,7 +931,34 @@ export enum AuthMechanismEnum {
|
|||||||
*
|
*
|
||||||
* Configuration for connecting to DataStax Astra DB in the cloud.
|
* Configuration for connecting to DataStax Astra DB in the cloud.
|
||||||
*/
|
*/
|
||||||
export interface AuthConfigurationType {
|
export interface AuthenticationType {
|
||||||
|
/**
|
||||||
|
* Generated Personal Access Token for Databricks workspace authentication. This token is
|
||||||
|
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
|
||||||
|
*/
|
||||||
|
token?: string;
|
||||||
|
/**
|
||||||
|
* Service Principal Application ID created in your Databricks Account Console for OAuth
|
||||||
|
* Machine-to-Machine authentication.
|
||||||
|
*/
|
||||||
|
clientId?: string;
|
||||||
|
/**
|
||||||
|
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
|
||||||
|
* secure OAuth2 authentication.
|
||||||
|
*/
|
||||||
|
clientSecret?: string;
|
||||||
|
/**
|
||||||
|
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
|
||||||
|
*/
|
||||||
|
azureClientId?: string;
|
||||||
|
/**
|
||||||
|
* Azure Service Principal client secret created in Azure AD for authentication.
|
||||||
|
*/
|
||||||
|
azureClientSecret?: string;
|
||||||
|
/**
|
||||||
|
* Azure Active Directory Tenant ID where your Service Principal is registered.
|
||||||
|
*/
|
||||||
|
azureTenantId?: string;
|
||||||
/**
|
/**
|
||||||
* Password to connect to source.
|
* Password to connect to source.
|
||||||
*/
|
*/
|
||||||
@ -1655,7 +1694,7 @@ export interface HiveMetastoreConnectionDetails {
|
|||||||
/**
|
/**
|
||||||
* Choose Auth Config Type.
|
* Choose Auth Config Type.
|
||||||
*/
|
*/
|
||||||
authType?: HiveMetastoreConnectionDetailsAuthConfigurationType;
|
authType?: AuthConfigurationType;
|
||||||
/**
|
/**
|
||||||
* Custom OpenMetadata Classification name for Postgres policy tags.
|
* Custom OpenMetadata Classification name for Postgres policy tags.
|
||||||
*/
|
*/
|
||||||
@ -1747,7 +1786,7 @@ export interface HiveMetastoreConnectionDetails {
|
|||||||
*
|
*
|
||||||
* Azure Database Connection Config
|
* Azure Database Connection Config
|
||||||
*/
|
*/
|
||||||
export interface HiveMetastoreConnectionDetailsAuthConfigurationType {
|
export interface AuthConfigurationType {
|
||||||
/**
|
/**
|
||||||
* Password to connect to source.
|
* Password to connect to source.
|
||||||
*/
|
*/
|
||||||
|
@ -2956,6 +2956,8 @@ export interface ConfigObject {
|
|||||||
/**
|
/**
|
||||||
* Types of methods used to authenticate to the tableau instance
|
* Types of methods used to authenticate to the tableau instance
|
||||||
*
|
*
|
||||||
|
* Choose between different authentication types for Databricks.
|
||||||
|
*
|
||||||
* Choose Auth Config Type.
|
* Choose Auth Config Type.
|
||||||
*
|
*
|
||||||
* Types of methods used to authenticate to the alation instance
|
* Types of methods used to authenticate to the alation instance
|
||||||
@ -4062,6 +4064,16 @@ export enum AuthProvider {
|
|||||||
*
|
*
|
||||||
* Access Token Auth Credentials
|
* Access Token Auth Credentials
|
||||||
*
|
*
|
||||||
|
* Choose between different authentication types for Databricks.
|
||||||
|
*
|
||||||
|
* Personal Access Token authentication for Databricks.
|
||||||
|
*
|
||||||
|
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
|
||||||
|
* Databricks.
|
||||||
|
*
|
||||||
|
* Azure Active Directory authentication for Azure Databricks workspaces using Service
|
||||||
|
* Principal.
|
||||||
|
*
|
||||||
* Choose Auth Config Type.
|
* Choose Auth Config Type.
|
||||||
*
|
*
|
||||||
* Common Database Connection Config
|
* Common Database Connection Config
|
||||||
@ -4115,6 +4127,33 @@ export interface AuthenticationTypeForTableau {
|
|||||||
* Personal Access Token Secret.
|
* Personal Access Token Secret.
|
||||||
*/
|
*/
|
||||||
personalAccessTokenSecret?: string;
|
personalAccessTokenSecret?: string;
|
||||||
|
/**
|
||||||
|
* Generated Personal Access Token for Databricks workspace authentication. This token is
|
||||||
|
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
|
||||||
|
*/
|
||||||
|
token?: string;
|
||||||
|
/**
|
||||||
|
* Service Principal Application ID created in your Databricks Account Console for OAuth
|
||||||
|
* Machine-to-Machine authentication.
|
||||||
|
*/
|
||||||
|
clientId?: string;
|
||||||
|
/**
|
||||||
|
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
|
||||||
|
* secure OAuth2 authentication.
|
||||||
|
*/
|
||||||
|
clientSecret?: string;
|
||||||
|
/**
|
||||||
|
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
|
||||||
|
*/
|
||||||
|
azureClientId?: string;
|
||||||
|
/**
|
||||||
|
* Azure Service Principal client secret created in Azure AD for authentication.
|
||||||
|
*/
|
||||||
|
azureClientSecret?: string;
|
||||||
|
/**
|
||||||
|
* Azure Active Directory Tenant ID where your Service Principal is registered.
|
||||||
|
*/
|
||||||
|
azureTenantId?: string;
|
||||||
awsConfig?: AWSCredentials;
|
awsConfig?: AWSCredentials;
|
||||||
azureConfig?: AzureCredentials;
|
azureConfig?: AzureCredentials;
|
||||||
/**
|
/**
|
||||||
|
@ -812,6 +812,18 @@ export interface ConfigObject {
|
|||||||
* Establish secure connection with clickhouse
|
* Establish secure connection with clickhouse
|
||||||
*/
|
*/
|
||||||
secure?: boolean;
|
secure?: boolean;
|
||||||
|
/**
|
||||||
|
* Choose between different authentication types for Databricks.
|
||||||
|
*
|
||||||
|
* Choose Auth Config Type.
|
||||||
|
*
|
||||||
|
* Types of methods used to authenticate to the tableau instance
|
||||||
|
*
|
||||||
|
* Types of methods used to authenticate to the alation instance
|
||||||
|
*
|
||||||
|
* Authentication type to connect to Apache Ranger.
|
||||||
|
*/
|
||||||
|
authType?: AuthenticationType | NoConfigAuthenticationTypes;
|
||||||
/**
|
/**
|
||||||
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
|
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
|
||||||
* would like to restrict the metadata reading to a single catalog. When left blank,
|
* would like to restrict the metadata reading to a single catalog. When left blank,
|
||||||
@ -894,16 +906,6 @@ export interface ConfigObject {
|
|||||||
* Authentication mode to connect to Impala.
|
* Authentication mode to connect to Impala.
|
||||||
*/
|
*/
|
||||||
authMechanism?: AuthMechanismEnum;
|
authMechanism?: AuthMechanismEnum;
|
||||||
/**
|
|
||||||
* Choose Auth Config Type.
|
|
||||||
*
|
|
||||||
* Types of methods used to authenticate to the tableau instance
|
|
||||||
*
|
|
||||||
* Types of methods used to authenticate to the alation instance
|
|
||||||
*
|
|
||||||
* Authentication type to connect to Apache Ranger.
|
|
||||||
*/
|
|
||||||
authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
|
|
||||||
/**
|
/**
|
||||||
* Use slow logs to extract lineage.
|
* Use slow logs to extract lineage.
|
||||||
*/
|
*/
|
||||||
@ -1883,6 +1885,16 @@ export enum AuthProvider {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
* Choose between different authentication types for Databricks.
|
||||||
|
*
|
||||||
|
* Personal Access Token authentication for Databricks.
|
||||||
|
*
|
||||||
|
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
|
||||||
|
* Databricks.
|
||||||
|
*
|
||||||
|
* Azure Active Directory authentication for Azure Databricks workspaces using Service
|
||||||
|
* Principal.
|
||||||
|
*
|
||||||
* Choose Auth Config Type.
|
* Choose Auth Config Type.
|
||||||
*
|
*
|
||||||
* Common Database Connection Config
|
* Common Database Connection Config
|
||||||
@ -1915,7 +1927,34 @@ export enum AuthProvider {
|
|||||||
*
|
*
|
||||||
* Configuration for connecting to Ranger Basic Auth.
|
* Configuration for connecting to Ranger Basic Auth.
|
||||||
*/
|
*/
|
||||||
export interface AuthConfigurationType {
|
export interface AuthenticationType {
|
||||||
|
/**
|
||||||
|
* Generated Personal Access Token for Databricks workspace authentication. This token is
|
||||||
|
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
|
||||||
|
*/
|
||||||
|
token?: string;
|
||||||
|
/**
|
||||||
|
* Service Principal Application ID created in your Databricks Account Console for OAuth
|
||||||
|
* Machine-to-Machine authentication.
|
||||||
|
*/
|
||||||
|
clientId?: string;
|
||||||
|
/**
|
||||||
|
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
|
||||||
|
* secure OAuth2 authentication.
|
||||||
|
*/
|
||||||
|
clientSecret?: string;
|
||||||
|
/**
|
||||||
|
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
|
||||||
|
*/
|
||||||
|
azureClientId?: string;
|
||||||
|
/**
|
||||||
|
* Azure Service Principal client secret created in Azure AD for authentication.
|
||||||
|
*/
|
||||||
|
azureClientSecret?: string;
|
||||||
|
/**
|
||||||
|
* Azure Active Directory Tenant ID where your Service Principal is registered.
|
||||||
|
*/
|
||||||
|
azureTenantId?: string;
|
||||||
/**
|
/**
|
||||||
* Password to connect to source.
|
* Password to connect to source.
|
||||||
*
|
*
|
||||||
@ -2851,7 +2890,7 @@ export interface ConfigConnection {
|
|||||||
/**
|
/**
|
||||||
* Choose Auth Config Type.
|
* Choose Auth Config Type.
|
||||||
*/
|
*/
|
||||||
authType?: ConnectionAuthConfigurationType;
|
authType?: AuthConfigurationType;
|
||||||
/**
|
/**
|
||||||
* Custom OpenMetadata Classification name for Postgres policy tags.
|
* Custom OpenMetadata Classification name for Postgres policy tags.
|
||||||
*/
|
*/
|
||||||
@ -2922,7 +2961,7 @@ export interface ConfigConnection {
|
|||||||
*
|
*
|
||||||
* Azure Database Connection Config
|
* Azure Database Connection Config
|
||||||
*/
|
*/
|
||||||
export interface ConnectionAuthConfigurationType {
|
export interface AuthConfigurationType {
|
||||||
/**
|
/**
|
||||||
* Password to connect to source.
|
* Password to connect to source.
|
||||||
*/
|
*/
|
||||||
@ -3339,7 +3378,7 @@ export interface HiveMetastoreConnectionDetails {
|
|||||||
/**
|
/**
|
||||||
* Choose Auth Config Type.
|
* Choose Auth Config Type.
|
||||||
*/
|
*/
|
||||||
authType?: ConnectionAuthConfigurationType;
|
authType?: AuthConfigurationType;
|
||||||
/**
|
/**
|
||||||
* Custom OpenMetadata Classification name for Postgres policy tags.
|
* Custom OpenMetadata Classification name for Postgres policy tags.
|
||||||
*/
|
*/
|
||||||
|
@ -1364,6 +1364,18 @@ export interface ConfigObject {
|
|||||||
* Establish secure connection with clickhouse
|
* Establish secure connection with clickhouse
|
||||||
*/
|
*/
|
||||||
secure?: boolean;
|
secure?: boolean;
|
||||||
|
/**
|
||||||
|
* Choose between different authentication types for Databricks.
|
||||||
|
*
|
||||||
|
* Choose Auth Config Type.
|
||||||
|
*
|
||||||
|
* Types of methods used to authenticate to the tableau instance
|
||||||
|
*
|
||||||
|
* Types of methods used to authenticate to the alation instance
|
||||||
|
*
|
||||||
|
* Authentication type to connect to Apache Ranger.
|
||||||
|
*/
|
||||||
|
authType?: AuthenticationType | NoConfigAuthenticationTypes;
|
||||||
/**
|
/**
|
||||||
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
|
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
|
||||||
* would like to restrict the metadata reading to a single catalog. When left blank,
|
* would like to restrict the metadata reading to a single catalog. When left blank,
|
||||||
@ -1446,16 +1458,6 @@ export interface ConfigObject {
|
|||||||
* Authentication mode to connect to Impala.
|
* Authentication mode to connect to Impala.
|
||||||
*/
|
*/
|
||||||
authMechanism?: AuthMechanismEnum;
|
authMechanism?: AuthMechanismEnum;
|
||||||
/**
|
|
||||||
* Choose Auth Config Type.
|
|
||||||
*
|
|
||||||
* Types of methods used to authenticate to the tableau instance
|
|
||||||
*
|
|
||||||
* Types of methods used to authenticate to the alation instance
|
|
||||||
*
|
|
||||||
* Authentication type to connect to Apache Ranger.
|
|
||||||
*/
|
|
||||||
authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
|
|
||||||
/**
|
/**
|
||||||
* Use slow logs to extract lineage.
|
* Use slow logs to extract lineage.
|
||||||
*/
|
*/
|
||||||
@ -2372,6 +2374,16 @@ export enum AuthMechanismEnum {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
* Choose between different authentication types for Databricks.
|
||||||
|
*
|
||||||
|
* Personal Access Token authentication for Databricks.
|
||||||
|
*
|
||||||
|
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
|
||||||
|
* Databricks.
|
||||||
|
*
|
||||||
|
* Azure Active Directory authentication for Azure Databricks workspaces using Service
|
||||||
|
* Principal.
|
||||||
|
*
|
||||||
* Choose Auth Config Type.
|
* Choose Auth Config Type.
|
||||||
*
|
*
|
||||||
* Common Database Connection Config
|
* Common Database Connection Config
|
||||||
@ -2404,7 +2416,34 @@ export enum AuthMechanismEnum {
|
|||||||
*
|
*
|
||||||
* Configuration for connecting to Ranger Basic Auth.
|
* Configuration for connecting to Ranger Basic Auth.
|
||||||
*/
|
*/
|
||||||
export interface AuthConfigurationType {
|
export interface AuthenticationType {
|
||||||
|
/**
|
||||||
|
* Generated Personal Access Token for Databricks workspace authentication. This token is
|
||||||
|
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
|
||||||
|
*/
|
||||||
|
token?: string;
|
||||||
|
/**
|
||||||
|
* Service Principal Application ID created in your Databricks Account Console for OAuth
|
||||||
|
* Machine-to-Machine authentication.
|
||||||
|
*/
|
||||||
|
clientId?: string;
|
||||||
|
/**
|
||||||
|
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
|
||||||
|
* secure OAuth2 authentication.
|
||||||
|
*/
|
||||||
|
clientSecret?: string;
|
||||||
|
/**
|
||||||
|
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
|
||||||
|
*/
|
||||||
|
azureClientId?: string;
|
||||||
|
/**
|
||||||
|
* Azure Service Principal client secret created in Azure AD for authentication.
|
||||||
|
*/
|
||||||
|
azureClientSecret?: string;
|
||||||
|
/**
|
||||||
|
* Azure Active Directory Tenant ID where your Service Principal is registered.
|
||||||
|
*/
|
||||||
|
azureTenantId?: string;
|
||||||
/**
|
/**
|
||||||
* Password to connect to source.
|
* Password to connect to source.
|
||||||
*
|
*
|
||||||
@ -3310,7 +3349,7 @@ export interface ConfigConnection {
|
|||||||
/**
|
/**
|
||||||
* Choose Auth Config Type.
|
* Choose Auth Config Type.
|
||||||
*/
|
*/
|
||||||
authType?: ConnectionAuthConfigurationType;
|
authType?: AuthConfigurationType;
|
||||||
/**
|
/**
|
||||||
* Custom OpenMetadata Classification name for Postgres policy tags.
|
* Custom OpenMetadata Classification name for Postgres policy tags.
|
||||||
*/
|
*/
|
||||||
@ -3381,7 +3420,7 @@ export interface ConfigConnection {
|
|||||||
*
|
*
|
||||||
* Azure Database Connection Config
|
* Azure Database Connection Config
|
||||||
*/
|
*/
|
||||||
export interface ConnectionAuthConfigurationType {
|
export interface AuthConfigurationType {
|
||||||
/**
|
/**
|
||||||
* Password to connect to source.
|
* Password to connect to source.
|
||||||
*/
|
*/
|
||||||
@ -3787,7 +3826,7 @@ export interface HiveMetastoreConnectionDetails {
|
|||||||
/**
|
/**
|
||||||
* Choose Auth Config Type.
|
* Choose Auth Config Type.
|
||||||
*/
|
*/
|
||||||
authType?: ConnectionAuthConfigurationType;
|
authType?: AuthConfigurationType;
|
||||||
/**
|
/**
|
||||||
* Custom OpenMetadata Classification name for Postgres policy tags.
|
* Custom OpenMetadata Classification name for Postgres policy tags.
|
||||||
*/
|
*/
|
||||||
|
@ -0,0 +1,30 @@
|
|||||||
|
/*
|
||||||
|
* Copyright 2025 Collate.
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
/**
|
||||||
|
* Azure Active Directory authentication for Azure Databricks workspaces using Service
|
||||||
|
* Principal.
|
||||||
|
*/
|
||||||
|
export interface AzureAdSetup {
|
||||||
|
/**
|
||||||
|
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
|
||||||
|
*/
|
||||||
|
azureClientId: string;
|
||||||
|
/**
|
||||||
|
* Azure Service Principal client secret created in Azure AD for authentication.
|
||||||
|
*/
|
||||||
|
azureClientSecret: string;
|
||||||
|
/**
|
||||||
|
* Azure Active Directory Tenant ID where your Service Principal is registered.
|
||||||
|
*/
|
||||||
|
azureTenantId: string;
|
||||||
|
}
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2025 Collate.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * OAuth2 Machine-to-Machine authentication using Service Principal credentials for
+ * Databricks.
+ */
+export interface DatabricksOAuth {
+    /**
+     * Service Principal Application ID created in your Databricks Account Console for OAuth
+     * Machine-to-Machine authentication.
+     */
+    clientId: string;
+    /**
+     * OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
+     * secure OAuth2 authentication.
+     */
+    clientSecret: string;
+}
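A matching object for the OAuth2 Machine-to-Machine variant is sketched below; the values are placeholders, and in a real deployment the secret would be supplied through whatever secrets handling the service uses rather than being hard-coded.

// Illustrative sketch only: placeholder Service Principal credentials for OAuth M2M.
const oauthAuth: DatabricksOAuth = {
    clientId: "<databricks-service-principal-application-id>",
    clientSecret: "<oauth-secret-from-databricks-account-console>",
};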
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2025 Collate.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Personal Access Token authentication for Databricks.
+ */
+export interface PersonalAccessToken {
+    /**
+     * Generated Personal Access Token for Databricks workspace authentication. This token is
+     * created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
+     */
+    token: string;
+}
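Together, the three new interfaces above appear to correspond to the authentication options added for the Databricks connection. The sketch below shows one way a caller could narrow between them at runtime; the DatabricksAuth alias and describeAuth helper are illustrative names, not part of this change.

// Hypothetical union over the three generated auth shapes.
type DatabricksAuth = PersonalAccessToken | DatabricksOAuth | AzureAdSetup;

// Narrow by the field that is unique to each variant.
function describeAuth(auth: DatabricksAuth): string {
    if ("token" in auth) {
        return "Personal Access Token";
    }
    if ("azureClientId" in auth) {
        return "Azure AD Service Principal";
    }
    return "Databricks OAuth (Machine-to-Machine)";
}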
@@ -14,6 +14,10 @@
  * Databricks Connection Config
  */
 export interface DatabricksConnection {
+    /**
+     * Choose between different authentication types for Databricks.
+     */
+    authType: AuthenticationType;
     /**
      * Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
      * would like to restrict the metadata reading to a single catalog. When left blank,
@@ -70,16 +74,53 @@ export interface DatabricksConnection {
      * Regex to only include/exclude tables that matches the pattern.
      */
     tableFilterPattern?: FilterPattern;
-    /**
-     * Generated Token to connect to Databricks.
-     */
-    token: string;
     /**
      * Service Type
      */
     type?: DatabricksType;
 }
+
+/**
+ * Choose between different authentication types for Databricks.
+ *
+ * Personal Access Token authentication for Databricks.
+ *
+ * OAuth2 Machine-to-Machine authentication using Service Principal credentials for
+ * Databricks.
+ *
+ * Azure Active Directory authentication for Azure Databricks workspaces using Service
+ * Principal.
+ */
+export interface AuthenticationType {
+    /**
+     * Generated Personal Access Token for Databricks workspace authentication. This token is
+     * created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
+     */
+    token?: string;
+    /**
+     * Service Principal Application ID created in your Databricks Account Console for OAuth
+     * Machine-to-Machine authentication.
+     */
+    clientId?: string;
+    /**
+     * OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
+     * secure OAuth2 authentication.
+     */
+    clientSecret?: string;
+    /**
+     * Azure Service Principal Application (client) ID registered in your Azure Active Directory.
+     */
+    azureClientId?: string;
+    /**
+     * Azure Service Principal client secret created in Azure AD for authentication.
+     */
+    azureClientSecret?: string;
+    /**
+     * Azure Active Directory Tenant ID where your Service Principal is registered.
+     */
+    azureTenantId?: string;
+}

 /**
  * Regex to only include/exclude databases that matches the pattern.
  *
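Note that the generated AuthenticationType above flattens the three auth variants into a single interface with every field optional, so a consumer has to work out which mode is actually configured. A minimal sketch of that check follows; resolveAuthMode and DatabricksAuthMode are hypothetical helpers, not part of the OpenMetadata API.

type DatabricksAuthMode = "personalAccessToken" | "databricksOAuth" | "azureAd" | "unknown";

// Infer the configured mode from whichever group of optional fields is populated.
function resolveAuthMode(auth: AuthenticationType): DatabricksAuthMode {
    if (auth.token) {
        return "personalAccessToken";
    }
    if (auth.clientId && auth.clientSecret) {
        return "databricksOAuth";
    }
    if (auth.azureClientId && auth.azureClientSecret && auth.azureTenantId) {
        return "azureAd";
    }
    return "unknown";
}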
@@ -762,6 +762,8 @@ export interface ConfigObject {
     /**
      * Types of methods used to authenticate to the tableau instance
      *
+     * Choose between different authentication types for Databricks.
+     *
      * Choose Auth Config Type.
      *
      * Types of methods used to authenticate to the alation instance
@@ -1912,6 +1914,16 @@ export enum AuthProvider {
  *
  * Access Token Auth Credentials
  *
+ * Choose between different authentication types for Databricks.
+ *
+ * Personal Access Token authentication for Databricks.
+ *
+ * OAuth2 Machine-to-Machine authentication using Service Principal credentials for
+ * Databricks.
+ *
+ * Azure Active Directory authentication for Azure Databricks workspaces using Service
+ * Principal.
+ *
  * Choose Auth Config Type.
  *
  * Common Database Connection Config
@@ -1965,6 +1977,33 @@ export interface AuthenticationTypeForTableau {
      * Personal Access Token Secret.
      */
     personalAccessTokenSecret?: string;
+    /**
+     * Generated Personal Access Token for Databricks workspace authentication. This token is
+     * created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
+     */
+    token?: string;
+    /**
+     * Service Principal Application ID created in your Databricks Account Console for OAuth
+     * Machine-to-Machine authentication.
+     */
+    clientId?: string;
+    /**
+     * OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
+     * secure OAuth2 authentication.
+     */
+    clientSecret?: string;
+    /**
+     * Azure Service Principal Application (client) ID registered in your Azure Active Directory.
+     */
+    azureClientId?: string;
+    /**
+     * Azure Service Principal client secret created in Azure AD for authentication.
+     */
+    azureClientSecret?: string;
+    /**
+     * Azure Active Directory Tenant ID where your Service Principal is registered.
+     */
+    azureTenantId?: string;
     awsConfig?: AWSCredentials;
     azureConfig?: AzureCredentials;
     /**
@@ -661,6 +661,12 @@ export interface ConfigObject {
      * Establish secure connection with clickhouse
      */
     secure?: boolean;
+    /**
+     * Choose between different authentication types for Databricks.
+     *
+     * Choose Auth Config Type.
+     */
+    authType?: AuthenticationType | NoConfigAuthenticationTypes;
     /**
      * Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
      * would like to restrict the metadata reading to a single catalog. When left blank,
@@ -685,10 +691,6 @@ export interface ConfigObject {
      * Table name to fetch the query history.
      */
     queryHistoryTable?: string;
-    /**
-     * Generated Token to connect to Databricks.
-     */
-    token?: string;
     /**
      * CLI Driver version to connect to DB2. If not provided, the latest version will be used.
      */
@@ -741,10 +743,6 @@ export interface ConfigObject {
      * Authentication mode to connect to Impala.
      */
     authMechanism?: AuthMechanismEnum;
-    /**
-     * Choose Auth Config Type.
-     */
-    authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
     /**
      * Use slow logs to extract lineage.
      */
@@ -908,6 +906,10 @@ export interface ConfigObject {
      * Hostname of the Couchbase service.
      */
     hostport?: string;
+    /**
+     * Generated Token to connect to Databricks.
+     */
+    token?: string;
     /**
      * Enable dataflow for ingestion
      */
@@ -1028,6 +1030,16 @@ export enum AuthMechanismEnum {
 }

 /**
+ * Choose between different authentication types for Databricks.
+ *
+ * Personal Access Token authentication for Databricks.
+ *
+ * OAuth2 Machine-to-Machine authentication using Service Principal credentials for
+ * Databricks.
+ *
+ * Azure Active Directory authentication for Azure Databricks workspaces using Service
+ * Principal.
+ *
  * Choose Auth Config Type.
  *
  * Common Database Connection Config
@@ -1038,7 +1050,34 @@ export enum AuthMechanismEnum {
  *
  * Configuration for connecting to DataStax Astra DB in the cloud.
  */
-export interface AuthConfigurationType {
+export interface AuthenticationType {
+    /**
+     * Generated Personal Access Token for Databricks workspace authentication. This token is
+     * created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
+     */
+    token?: string;
+    /**
+     * Service Principal Application ID created in your Databricks Account Console for OAuth
+     * Machine-to-Machine authentication.
+     */
+    clientId?: string;
+    /**
+     * OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
+     * secure OAuth2 authentication.
+     */
+    clientSecret?: string;
+    /**
+     * Azure Service Principal Application (client) ID registered in your Azure Active Directory.
+     */
+    azureClientId?: string;
+    /**
+     * Azure Service Principal client secret created in Azure AD for authentication.
+     */
+    azureClientSecret?: string;
+    /**
+     * Azure Active Directory Tenant ID where your Service Principal is registered.
+     */
+    azureTenantId?: string;
     /**
      * Password to connect to source.
      */
@@ -1774,7 +1813,7 @@ export interface HiveMetastoreConnectionDetails {
     /**
      * Choose Auth Config Type.
      */
-    authType?: HiveMetastoreConnectionDetailsAuthConfigurationType;
+    authType?: AuthConfigurationType;
     /**
      * Custom OpenMetadata Classification name for Postgres policy tags.
      */
@@ -1866,7 +1905,7 @@ export interface HiveMetastoreConnectionDetails {
  *
  * Azure Database Connection Config
  */
-export interface HiveMetastoreConnectionDetailsAuthConfigurationType {
+export interface AuthConfigurationType {
     /**
      * Password to connect to source.
      */
|
@ -3473,6 +3473,8 @@ export interface ConfigObject {
|
|||||||
/**
|
/**
|
||||||
* Types of methods used to authenticate to the tableau instance
|
* Types of methods used to authenticate to the tableau instance
|
||||||
*
|
*
|
||||||
|
* Choose between different authentication types for Databricks.
|
||||||
|
*
|
||||||
* Choose Auth Config Type.
|
* Choose Auth Config Type.
|
||||||
*
|
*
|
||||||
* Types of methods used to authenticate to the alation instance
|
* Types of methods used to authenticate to the alation instance
|
||||||
@@ -4560,6 +4562,16 @@ export enum AuthMechanismEnum {
  *
  * Access Token Auth Credentials
  *
+ * Choose between different authentication types for Databricks.
+ *
+ * Personal Access Token authentication for Databricks.
+ *
+ * OAuth2 Machine-to-Machine authentication using Service Principal credentials for
+ * Databricks.
+ *
+ * Azure Active Directory authentication for Azure Databricks workspaces using Service
+ * Principal.
+ *
  * Choose Auth Config Type.
  *
  * Common Database Connection Config
@@ -4613,6 +4625,33 @@ export interface AuthenticationTypeForTableau {
      * Personal Access Token Secret.
      */
     personalAccessTokenSecret?: string;
+    /**
+     * Generated Personal Access Token for Databricks workspace authentication. This token is
+     * created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
+     */
+    token?: string;
+    /**
+     * Service Principal Application ID created in your Databricks Account Console for OAuth
+     * Machine-to-Machine authentication.
+     */
+    clientId?: string;
+    /**
+     * OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
+     * secure OAuth2 authentication.
+     */
+    clientSecret?: string;
+    /**
+     * Azure Service Principal Application (client) ID registered in your Azure Active Directory.
+     */
+    azureClientId?: string;
+    /**
+     * Azure Service Principal client secret created in Azure AD for authentication.
+     */
+    azureClientSecret?: string;
+    /**
+     * Azure Active Directory Tenant ID where your Service Principal is registered.
+     */
+    azureTenantId?: string;
     awsConfig?: AWSCredentials;
     azureConfig?: AzureCredentials;
     /**
@@ -806,6 +806,8 @@ export interface ConfigObject {
     /**
      * Types of methods used to authenticate to the tableau instance
      *
+     * Choose between different authentication types for Databricks.
+     *
      * Choose Auth Config Type.
      *
      * Types of methods used to authenticate to the alation instance
@@ -1956,6 +1958,16 @@ export enum AuthProvider {
  *
  * Access Token Auth Credentials
  *
+ * Choose between different authentication types for Databricks.
+ *
+ * Personal Access Token authentication for Databricks.
+ *
+ * OAuth2 Machine-to-Machine authentication using Service Principal credentials for
+ * Databricks.
+ *
+ * Azure Active Directory authentication for Azure Databricks workspaces using Service
+ * Principal.
+ *
  * Choose Auth Config Type.
  *
  * Common Database Connection Config
@@ -2009,6 +2021,33 @@ export interface AuthenticationTypeForTableau {
      * Personal Access Token Secret.
      */
     personalAccessTokenSecret?: string;
+    /**
+     * Generated Personal Access Token for Databricks workspace authentication. This token is
+     * created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
+     */
+    token?: string;
+    /**
+     * Service Principal Application ID created in your Databricks Account Console for OAuth
+     * Machine-to-Machine authentication.
+     */
+    clientId?: string;
+    /**
+     * OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
+     * secure OAuth2 authentication.
+     */
+    clientSecret?: string;
+    /**
+     * Azure Service Principal Application (client) ID registered in your Azure Active Directory.
+     */
+    azureClientId?: string;
+    /**
+     * Azure Service Principal client secret created in Azure AD for authentication.
+     */
+    azureClientSecret?: string;
+    /**
+     * Azure Active Directory Tenant ID where your Service Principal is registered.
+     */
+    azureTenantId?: string;
     awsConfig?: AWSCredentials;
     azureConfig?: AzureCredentials;
     /**
@@ -842,6 +842,8 @@ export interface ConfigObject {
     /**
      * Types of methods used to authenticate to the tableau instance
      *
+     * Choose between different authentication types for Databricks.
+     *
      * Choose Auth Config Type.
      *
      * Types of methods used to authenticate to the alation instance
@@ -2013,6 +2015,16 @@ export enum AuthProvider {
  *
  * Access Token Auth Credentials
  *
+ * Choose between different authentication types for Databricks.
+ *
+ * Personal Access Token authentication for Databricks.
+ *
+ * OAuth2 Machine-to-Machine authentication using Service Principal credentials for
+ * Databricks.
+ *
+ * Azure Active Directory authentication for Azure Databricks workspaces using Service
+ * Principal.
+ *
  * Choose Auth Config Type.
  *
  * Common Database Connection Config
@@ -2066,6 +2078,33 @@ export interface AuthenticationTypeForTableau {
      * Personal Access Token Secret.
      */
     personalAccessTokenSecret?: string;
+    /**
+     * Generated Personal Access Token for Databricks workspace authentication. This token is
+     * created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
+     */
+    token?: string;
+    /**
+     * Service Principal Application ID created in your Databricks Account Console for OAuth
+     * Machine-to-Machine authentication.
+     */
+    clientId?: string;
+    /**
+     * OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
+     * secure OAuth2 authentication.
+     */
+    clientSecret?: string;
+    /**
+     * Azure Service Principal Application (client) ID registered in your Azure Active Directory.
+     */
+    azureClientId?: string;
+    /**
+     * Azure Service Principal client secret created in Azure AD for authentication.
+     */
+    azureClientSecret?: string;
+    /**
+     * Azure Active Directory Tenant ID where your Service Principal is registered.
+     */
+    azureTenantId?: string;
     awsConfig?: AWSCredentials;
     azureConfig?: AzureCredentials;
     /**