Fixes #23356: Databricks OAuth & Azure AD Auth (#23482)

* feat: databricks oauth and azure ad auth setup

* refactor: add auth type changes in databricks.md

* fix: test after oauth changes

* refactor: unity catalog connection to databricks connection code
Keshav Mohta 2025-09-23 15:22:50 +05:30 committed by GitHub
parent 1c710ef5e3
commit f1afe8f5f1
31 changed files with 945 additions and 109 deletions

View File

@ -4,4 +4,19 @@
-- `profileData` field back to the original state.
UPDATE profiler_data_time_series
SET json = JSON_SET(json, '$.profileData', json->'$.profileData.profileData')
WHERE json->>'$.profileData.profileData' IS NOT NULL;
-- Migration script to restructure Databricks connection configuration
-- Move 'token' field from connection.config.token to connection.config.authType.token
UPDATE dbservice_entity
SET
json = JSON_SET (
JSON_REMOVE (json, '$.connection.config.token'),
'$.connection.config.authType',
JSON_OBJECT (
'token',
JSON_EXTRACT (json, '$.connection.config.token')
)
)
WHERE
serviceType = 'Databricks';

View File

@ -4,4 +4,16 @@
-- `profileData` field back to the original state.
UPDATE profiler_data_time_series
SET json = jsonb_set(json::jsonb, '{profileData}', json::jsonb->'profileData'->'profileData')::json
WHERE json->'profileData'->>'profileData' IS NOT NULL;
-- Migration script to restructure Databricks connection configuration
-- Move 'token' field from connection.config.token to connection.config.authType.token
UPDATE dbservice_entity
SET json = jsonb_set(
json #- '{connection,config,token}',
'{connection,config,authType}',
jsonb_build_object('token', json #> '{connection,config,token}'),
true
)
WHERE serviceType = 'Databricks';
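
Both scripts perform the same reshape of the stored service document. A minimal sketch of the transformation in Python (illustrative values, not part of the migration):

before = {"connection": {"config": {"token": "dapi-example", "hostPort": "adb.example.net:443"}}}

# Move the flat `token` field under the new `authType` object.
cfg = before["connection"]["config"]
cfg["authType"] = {"token": cfg.pop("token")}

assert cfg == {"hostPort": "adb.example.net:443", "authType": {"token": "dapi-example"}}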

View File

@ -5,7 +5,15 @@ source:
config:
  catalog: hive_metastore
  databaseSchema: default
  authType:
    token: <databricks token>
    # clientId: databricks service principal client id
    # clientSecret: databricks service principal client secret
    # azureClientSecret: azure client secret
    # azureClientId: azure client id
    # azureTenantId: azure tenant id
  hostPort: localhost:443
  connectionTimeout: 120
  connectionArguments:

View File

@ -12,9 +12,11 @@
"""
Source connection handler
"""
from copy import deepcopy
from functools import partial
from typing import Optional
from databricks.sdk.core import Config, azure_service_principal, oauth_service_principal
from sqlalchemy.engine import Engine
from sqlalchemy.exc import DatabaseError
from sqlalchemy.inspection import inspect
@ -22,6 +24,15 @@ from sqlalchemy.inspection import inspect
from metadata.generated.schema.entity.automations.workflow import (
Workflow as AutomationWorkflow,
)
from metadata.generated.schema.entity.services.connections.database.databricks.azureAdSetup import (
AzureAdSetup,
)
from metadata.generated.schema.entity.services.connections.database.databricks.databricksOAuth import (
DatabricksOauth,
)
from metadata.generated.schema.entity.services.connections.database.databricks.personalAccessToken import (
PersonalAccessToken,
)
from metadata.generated.schema.entity.services.connections.database.databricksConnection import (
DatabricksConnection,
)
@ -122,8 +133,49 @@ class DatabricksEngineWrapper:
def get_connection_url(connection: DatabricksConnection) -> str:
    return f"{connection.scheme.value}://{connection.hostPort}"
def get_personal_access_token_auth(connection: DatabricksConnection) -> dict:
    """
    Configure Personal Access Token authentication
    """
    return {"access_token": connection.authType.token.get_secret_value()}


def get_databricks_oauth_auth(connection: DatabricksConnection):
    """
    Create Databricks OAuth2 M2M credentials provider for Service Principal authentication
    """

    def credential_provider():
        hostname = connection.hostPort.split(":")[0]
        config = Config(
            host=f"https://{hostname}",
            client_id=connection.authType.clientId,
            client_secret=connection.authType.clientSecret.get_secret_value(),
        )
        return oauth_service_principal(config)

    return {"credentials_provider": credential_provider}


def get_azure_ad_auth(connection: DatabricksConnection):
    """
    Create Azure AD credentials provider for Azure Service Principal authentication
    """

    def credential_provider():
        hostname = connection.hostPort.split(":")[0]
        config = Config(
            host=f"https://{hostname}",
            azure_client_secret=connection.authType.azureClientSecret.get_secret_value(),
            azure_client_id=connection.authType.azureClientId,
            azure_tenant_id=connection.authType.azureTenantId,
        )
        return azure_service_principal(config)

    return {"credentials_provider": credential_provider}
def get_connection(connection: DatabricksConnection) -> Engine:
@ -131,17 +183,39 @@ def get_connection(connection: DatabricksConnection) -> Engine:
    Create connection
    """
    # Add httpPath to connection arguments
    if connection.httpPath:
        if not connection.connectionArguments:
            connection.connectionArguments = init_empty_connection_arguments()
        connection.connectionArguments.root["http_path"] = connection.httpPath

    auth_method = {
        PersonalAccessToken: get_personal_access_token_auth,
        DatabricksOauth: get_databricks_oauth_auth,
        AzureAdSetup: get_azure_ad_auth,
    }.get(type(connection.authType))

    if not auth_method:
        raise ValueError(
            f"Unsupported authentication type: {type(connection.authType)}"
        )

    auth_args = auth_method(connection)

    # Inject the auth arguments into a copy so the stored connection object is not mutated.
    original_connection_arguments = connection.connectionArguments
    connection.connectionArguments = deepcopy(original_connection_arguments)
    connection.connectionArguments.root.update(auth_args)

    engine = create_generic_db_connection(
        connection=connection,
        get_connection_url_fn=get_connection_url,
        get_connection_args_fn=get_connection_args_common,
    )

    connection.connectionArguments = original_connection_arguments
    return engine
def test_connection(
metadata: OpenMetadata,

View File

@ -46,6 +46,11 @@ def _(*_, **__):
return "SELECT SESSION_USER()"
@compiles(ConnTestFn, Dialects.Databricks)
def _(*_, **__):
return "SELECT '42'"
@compiles(ConnTestFn, Dialects.Db2)
@compiles(ConnTestFn, Dialects.IbmDbSa)
@compiles(ConnTestFn, Dialects.Ibmi)

View File

@ -15,6 +15,9 @@ supporting sqlalchemy abstraction layer
"""
from metadata.generated.schema.entity.services.connections.database.databricksConnection import (
DatabricksConnection,
)
from metadata.sampler.sqlalchemy.databricks.sampler import DatabricksSamplerInterface
@ -24,4 +27,22 @@ class UnityCatalogSamplerInterface(DatabricksSamplerInterface):
"""
    def __init__(self, *args, **kwargs):
        # Convert the Unity Catalog connection into a Databricks connection,
        # moving the flat `token` field under the new `authType` object.
        config = kwargs["service_connection_config"].model_dump(mode="json")
        token = config.pop("token")
        kwargs["service_connection_config"] = DatabricksConnection.model_validate(
            {
                **config,
                "type": "Databricks",
                "authType": {"token": token},
            }
        )
        super().__init__(*args, **kwargs)
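
For illustration, the reshape this performs on the dumped config (hypothetical values; the real dump carries many more fields):

unity_cfg = {"type": "UnityCatalog", "hostPort": "adb.example.net:443", "token": "dapi-example"}
# becomes
databricks_cfg = {"type": "Databricks", "hostPort": "adb.example.net:443", "authType": {"token": "dapi-example"}}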

View File

@ -85,7 +85,7 @@ mock_databricks_config = {
"serviceName": "local_databricks1",
"serviceConnection": {
"config": {
"token": "random_token",
"authType": {"token": "random_token"},
"hostPort": "localhost:443",
"httpPath": "sql/1.0/endpoints/path",
"connectionArguments": {

View File

@ -30,6 +30,12 @@ from metadata.generated.schema.entity.services.connections.database.common.basic
from metadata.generated.schema.entity.services.connections.database.common.jwtAuth import (
JwtAuth,
)
from metadata.generated.schema.entity.services.connections.database.databricks.databricksOAuth import (
DatabricksOauth,
)
from metadata.generated.schema.entity.services.connections.database.databricks.personalAccessToken import (
PersonalAccessToken,
)
from metadata.generated.schema.entity.services.connections.database.databricksConnection import (
DatabricksConnection,
DatabricksScheme,
@ -130,13 +136,11 @@ class SourceConnectionTest(TestCase):
get_connection_url,
)
expected_result = (
"databricks+connector://token:KlivDTACWXKmZVfN1qIM@1.1.1.1:443"
)
expected_result = "databricks+connector://1.1.1.1:443"
databricks_conn_obj = DatabricksConnection(
scheme=DatabricksScheme.databricks_connector,
hostPort="1.1.1.1:443",
token="KlivDTACWXKmZVfN1qIM",
authType=PersonalAccessToken(token="KlivDTACWXKmZVfN1qIM"),
httpPath="/sql/1.0/warehouses/abcdedfg",
)
assert expected_result == get_connection_url(databricks_conn_obj)
@ -146,14 +150,16 @@ class SourceConnectionTest(TestCase):
get_connection_url,
)
expected_result = (
"databricks+connector://token:KlivDTACWXKmZVfN1qIM@1.1.1.1:443"
)
expected_result = "databricks+connector://1.1.1.1:443"
databricks_conn_obj = DatabricksConnection(
scheme=DatabricksScheme.databricks_connector,
hostPort="1.1.1.1:443",
token="KlivDTACWXKmZVfN1qIM",
authType=DatabricksOauth(
clientId="d40e2905-88ef-42ab-8898-fbefff2d071d",
clientSecret="secret-value",
),
httpPath="/sql/1.0/warehouses/abcdedfg",
catalog="main",
)
assert expected_result == get_connection_url(databricks_conn_obj)

View File

@ -235,7 +235,11 @@ def test_databricks():
"serviceName": "local_databricks",
"serviceConnection": {
"config": {
"token": "<databricks token>",
"authType": {
"azureClientId": "3df43ed7-5f2f-46bb-9793-384c6374a81d",
"azureClientSecret": "secret-value",
"azureTenantId": "3df43ed7-5g1f-46bb-9793-384c6374a81d",
},
"hostPort": "localhost:443",
"httpPath": "<http path of databricks cluster>",
"connectionArguments": {

View File

@ -23,6 +23,9 @@ from metadata.generated.schema.api.data.createTable import CreateTableRequest
from metadata.generated.schema.entity.data.database import Database
from metadata.generated.schema.entity.data.databaseSchema import DatabaseSchema
from metadata.generated.schema.entity.data.table import Column, DataType, TableType
from metadata.generated.schema.entity.services.connections.database.databricks.personalAccessToken import (
PersonalAccessToken,
)
from metadata.generated.schema.entity.services.databaseService import (
DatabaseConnection,
DatabaseService,
@ -45,7 +48,9 @@ mock_databricks_config = {
"type": "Databricks",
"catalog": "hive_metastore",
"databaseSchema": "default",
"token": "123sawdtesttoken",
"authType": {
"token": "123sawdtesttoken",
},
"hostPort": "localhost:443",
"httpPath": "/sql/1.0/warehouses/abcdedfg",
"connectionArguments": {"http_path": "/sql/1.0/warehouses/abcdedfg"},
@ -397,12 +402,12 @@ class DatabricksConnectionTest(TestCase):
connection = self.DatabricksConnection(
scheme=self.DatabricksScheme.databricks_connector,
hostPort="test-host:443",
token="test-token",
authType=PersonalAccessToken(token="test-token"),
httpPath="/sql/1.0/warehouses/test",
)
url = self.get_connection_url(connection)
expected_url = "databricks+connector://token:test-token@test-host:443"
expected_url = "databricks+connector://test-host:443"
self.assertEqual(url, expected_url)
@patch(
@ -413,7 +418,7 @@ class DatabricksConnectionTest(TestCase):
connection = self.DatabricksConnection(
scheme=self.DatabricksScheme.databricks_connector,
hostPort="test-host:443",
token="test-token",
authType=PersonalAccessToken(token="test-token"),
httpPath="/sql/1.0/warehouses/test",
)
@ -764,7 +769,7 @@ class DatabricksConnectionTest(TestCase):
service_connection = DatabricksConnection(
scheme=DatabricksScheme.databricks_connector,
hostPort="test-host:443",
token="test-token",
authType=PersonalAccessToken(token="test-token"),
httpPath="/sql/1.0/warehouses/test",
queryHistoryTable="test_table",
)

View File

@ -30,6 +30,7 @@ import org.openmetadata.schema.services.connections.database.BigQueryConnection;
import org.openmetadata.schema.services.connections.database.BigTableConnection;
import org.openmetadata.schema.services.connections.database.CassandraConnection;
import org.openmetadata.schema.services.connections.database.CockroachConnection;
import org.openmetadata.schema.services.connections.database.DatabricksConnection;
import org.openmetadata.schema.services.connections.database.DatalakeConnection;
import org.openmetadata.schema.services.connections.database.DeltaLakeConnection;
import org.openmetadata.schema.services.connections.database.GreenplumConnection;
@ -104,6 +105,7 @@ public final class ClassConverterFactory {
Map.entry(MatillionConnection.class, new MatillionConnectionClassConverter()),
Map.entry(VertexAIConnection.class, new VertexAIConnectionClassConverter()),
Map.entry(RangerConnection.class, new RangerConnectionClassConverter()),
Map.entry(DatabricksConnection.class, new DatabricksConnectionClassConverter()),
Map.entry(CassandraConnection.class, new CassandraConnectionClassConverter()),
Map.entry(SSISConnection.class, new SsisConnectionClassConverter()),
Map.entry(WherescapeConnection.class, new WherescapeConnectionClassConverter()));

View File

@ -0,0 +1,43 @@
/*
* Copyright 2021 Collate
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openmetadata.service.secrets.converter;

import java.util.List;
import org.openmetadata.schema.services.connections.database.DatabricksConnection;
import org.openmetadata.schema.services.connections.database.databricks.AzureADSetup;
import org.openmetadata.schema.services.connections.database.databricks.DatabricksOAuth;
import org.openmetadata.schema.services.connections.database.databricks.PersonalAccessToken;
import org.openmetadata.schema.utils.JsonUtils;

/** Converter class to get a `DatabricksConnection` object. */
public class DatabricksConnectionClassConverter extends ClassConverter {

  private static final List<Class<?>> CONFIG_SOURCE_CLASSES =
      List.of(PersonalAccessToken.class, DatabricksOAuth.class, AzureADSetup.class);

  public DatabricksConnectionClassConverter() {
    super(DatabricksConnection.class);
  }

  @Override
  public Object convert(Object object) {
    DatabricksConnection databricksConnection =
        (DatabricksConnection) JsonUtils.convertValue(object, this.clazz);
    tryToConvert(databricksConnection.getAuthType(), CONFIG_SOURCE_CLASSES)
        .ifPresent(databricksConnection::setAuthType);
    return databricksConnection;
  }
}

View File

@ -0,0 +1,32 @@
{
"$id": "https://open-metadata.org/schema/entity/services/connections/database/databricks/azureAdSetup.json",
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Azure AD Setup",
"description": "Azure Active Directory authentication for Azure Databricks workspaces using Service Principal.",
"javaType": "org.openmetadata.schema.services.connections.database.databricks.AzureADSetup",
"type": "object",
"properties": {
"azureClientId": {
"title": "Azure Client ID",
"description": "Azure Service Principal Application (client) ID registered in your Azure Active Directory.",
"type": "string"
},
"azureClientSecret": {
"title": "Azure Client Secret",
"description": "Azure Service Principal client secret created in Azure AD for authentication.",
"type": "string",
"format": "password"
},
"azureTenantId": {
"title": "Azure Tenant ID",
"description": "Azure Active Directory Tenant ID where your Service Principal is registered.",
"type": "string"
}
},
"additionalProperties": false,
"required": [
"azureClientId",
"azureClientSecret",
"azureTenantId"
]
}

View File

@ -0,0 +1,26 @@
{
"$id": "https://open-metadata.org/schema/entity/services/connections/database/databricks/databricksOAuth.json",
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Databricks OAuth",
"description": "OAuth2 Machine-to-Machine authentication using Service Principal credentials for Databricks.",
"javaType": "org.openmetadata.schema.services.connections.database.databricks.DatabricksOAuth",
"type": "object",
"properties": {
"clientId": {
"title": "Client ID",
"description": "Service Principal Application ID created in your Databricks Account Console for OAuth Machine-to-Machine authentication.",
"type": "string"
},
"clientSecret": {
"title": "Client Secret",
"description": "OAuth Secret generated for the Service Principal in Databricks Account Console. Used for secure OAuth2 authentication.",
"type": "string",
"format": "password"
}
},
"additionalProperties": false,
"required": [
"clientId",
"clientSecret"
]
}

View File

@ -0,0 +1,20 @@
{
"$id": "https://open-metadata.org/schema/entity/services/connections/database/databricks/personalAccessToken.json",
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Personal Access Token",
"description": "Personal Access Token authentication for Databricks.",
"javaType": "org.openmetadata.schema.services.connections.database.databricks.PersonalAccessToken",
"type": "object",
"properties": {
"token": {
"title": "Token",
"description": "Generated Personal Access Token for Databricks workspace authentication. This token is created from User Settings -> Developer -> Access Tokens in your Databricks workspace.",
"type": "string",
"format": "password"
}
},
"additionalProperties": false,
"required": [
"token"
]
}

View File

@ -41,11 +41,23 @@
"description": "Host and port of the Databricks service.",
"type": "string"
},
"token": {
"title": "Token",
"description": "Generated Token to connect to Databricks.",
"type": "string",
"format": "password"
"authType": {
"title": "Authentication Type",
"description": "Choose between different authentication types for Databricks.",
"oneOf": [
{
"title": "Personal Access Token",
"$ref": "./databricks/personalAccessToken.json"
},
{
"title": "Databricks OAuth",
"$ref": "./databricks/databricksOAuth.json"
},
{
"title": "Azure AD Setup",
"$ref": "./databricks/azureAdSetup.json"
}
]
},
"httpPath": {
"title": "Http Path",
@ -142,7 +154,7 @@
"additionalProperties": false,
"required": [
"hostPort",
"token",
"httpPath"
"httpPath",
"authType"
]
}
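
A connection payload that satisfies the updated schema might look like this (values are placeholders; `authType` takes exactly one of the three shapes defined above):

{
  "type": "Databricks",
  "scheme": "databricks+connector",
  "hostPort": "adb.example.net:443",
  "httpPath": "/sql/1.0/warehouses/abc123",
  "authType": { "token": "<personal access token>" }
}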

View File

@ -32,9 +32,49 @@ This parameter specifies the host and port of the Databricks instance. This shou
If you are running the OpenMetadata ingestion in Docker and your services are hosted on `localhost`, then use `host.docker.internal:3000` as the value.
$$
$$section
### Authentication Type $(id="authType")
Select the authentication method to connect to your Databricks workspace.
- **Personal Access Token**: Generated Personal Access Token for Databricks workspace authentication.
- **Databricks OAuth**: OAuth2 Machine-to-Machine authentication using a Service Principal.
- **Azure AD Setup**: Specifically for Azure Databricks workspaces that use Azure Active Directory for identity management. Uses Azure Service Principal authentication through Azure AD.
$$
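Exactly one of the following blocks goes under `authType` (values are placeholders):

authType:
  token: <personal access token>

authType:
  clientId: <databricks service principal application id>
  clientSecret: <oauth secret>

authType:
  azureClientId: <azure application client id>
  azureClientSecret: <azure client secret>
  azureTenantId: <azure tenant id>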
$$section
### Token $(id="token")
Generated Token to connect to Databricks. E.g., `dapw488e89a7176f7eb39bbc718617891564`.
Personal Access Token (PAT) for authenticating with your Databricks workspace.
(e.g., `dapi1234567890abcdef`)
$$
$$section
### Client ID $(id="clientId")
The Application ID of your Databricks Service Principal for OAuth2 authentication.
(e.g., `12345678-1234-1234-1234-123456789abc`)
$$
$$section
### Client Secret $(id="clientSecret")
OAuth secret for the Databricks Service Principal.
$$
$$section
### Azure Client ID $(id="azureClientId")
Azure Active Directory Application (client) ID for Azure Databricks authentication.
(e.g., `a1b2c3d4-e5f6-7890-abcd-ef1234567890`)
$$
$$section
### Azure Client Secret $(id="azureClientSecret")
Secret key for the Azure AD Application.
$$
$$section
### Azure Tenant ID $(id="azureTenantId")
Your Azure Active Directory tenant identifier.
(e.g., `98765432-dcba-4321-abcd-1234567890ab`)
$$
$$section

View File

@ -930,6 +930,18 @@ export interface ConfigObject {
* Establish secure connection with clickhouse
*/
secure?: boolean;
/**
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the tableau instance
*
* Types of methods used to authenticate to the alation instance
*
* Authentication type to connect to Apache Ranger.
*/
authType?: AuthenticationType | NoConfigAuthenticationTypes;
/**
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
* would like to restrict the metadata reading to a single catalog. When left blank,
@ -1012,16 +1024,6 @@ export interface ConfigObject {
* Authentication mode to connect to Impala.
*/
authMechanism?: AuthMechanismEnum;
/**
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the tableau instance
*
* Types of methods used to authenticate to the alation instance
*
* Authentication type to connect to Apache Ranger.
*/
authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
/**
* Use slow logs to extract lineage.
*/
@ -2001,6 +2003,16 @@ export enum AuthProvider {
}
/**
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -2033,7 +2045,34 @@ export enum AuthProvider {
*
* Configuration for connecting to Ranger Basic Auth.
*/
export interface AuthConfigurationType {
export interface AuthenticationType {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
/**
* Password to connect to source.
*
@ -2969,7 +3008,7 @@ export interface ConfigConnection {
/**
* Choose Auth Config Type.
*/
authType?: ConnectionAuthConfigurationType;
authType?: AuthConfigurationType;
/**
* Custom OpenMetadata Classification name for Postgres policy tags.
*/
@ -3040,7 +3079,7 @@ export interface ConfigConnection {
*
* Azure Database Connection Config
*/
export interface ConnectionAuthConfigurationType {
export interface AuthConfigurationType {
/**
* Password to connect to source.
*/
@ -3457,7 +3496,7 @@ export interface HiveMetastoreConnectionDetails {
/**
* Choose Auth Config Type.
*/
authType?: ConnectionAuthConfigurationType;
authType?: AuthConfigurationType;
/**
* Custom OpenMetadata Classification name for Postgres policy tags.
*/

View File

@ -542,6 +542,12 @@ export interface ConfigObject {
* Establish secure connection with clickhouse
*/
secure?: boolean;
/**
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*/
authType?: AuthenticationType | NoConfigAuthenticationTypes;
/**
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
* would like to restrict the metadata reading to a single catalog. When left blank,
@ -566,10 +572,6 @@ export interface ConfigObject {
* Table name to fetch the query history.
*/
queryHistoryTable?: string;
/**
* Generated Token to connect to Databricks.
*/
token?: string;
/**
* CLI Driver version to connect to DB2. If not provided, the latest version will be used.
*/
@ -622,10 +624,6 @@ export interface ConfigObject {
* Authentication mode to connect to Impala.
*/
authMechanism?: AuthMechanismEnum;
/**
* Choose Auth Config Type.
*/
authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
/**
* Use slow logs to extract lineage.
*/
@ -789,6 +787,10 @@ export interface ConfigObject {
* Hostname of the Couchbase service.
*/
hostport?: string;
/**
* Generated Token to connect to Databricks.
*/
token?: string;
/**
* Enable dataflow for ingestion
*/
@ -909,6 +911,16 @@ export enum AuthMechanismEnum {
}
/**
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -919,7 +931,34 @@ export enum AuthMechanismEnum {
*
* Configuration for connecting to DataStax Astra DB in the cloud.
*/
export interface AuthConfigurationType {
export interface AuthenticationType {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
/**
* Password to connect to source.
*/
@ -1655,7 +1694,7 @@ export interface HiveMetastoreConnectionDetails {
/**
* Choose Auth Config Type.
*/
authType?: HiveMetastoreConnectionDetailsAuthConfigurationType;
authType?: AuthConfigurationType;
/**
* Custom OpenMetadata Classification name for Postgres policy tags.
*/
@ -1747,7 +1786,7 @@ export interface HiveMetastoreConnectionDetails {
*
* Azure Database Connection Config
*/
export interface HiveMetastoreConnectionDetailsAuthConfigurationType {
export interface AuthConfigurationType {
/**
* Password to connect to source.
*/

View File

@ -2956,6 +2956,8 @@ export interface ConfigObject {
/**
* Types of methods used to authenticate to the tableau instance
*
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the alation instance
@ -4062,6 +4064,16 @@ export enum AuthProvider {
*
* Access Token Auth Credentials
*
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -4115,8 +4127,35 @@ export interface AuthenticationTypeForTableau {
* Personal Access Token Secret.
*/
personalAccessTokenSecret?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* JWT to connect to source.
*/

View File

@ -812,6 +812,18 @@ export interface ConfigObject {
* Establish secure connection with clickhouse
*/
secure?: boolean;
/**
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the tableau instance
*
* Types of methods used to authenticate to the alation instance
*
* Authentication type to connect to Apache Ranger.
*/
authType?: AuthenticationType | NoConfigAuthenticationTypes;
/**
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
* would like to restrict the metadata reading to a single catalog. When left blank,
@ -894,16 +906,6 @@ export interface ConfigObject {
* Authentication mode to connect to Impala.
*/
authMechanism?: AuthMechanismEnum;
/**
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the tableau instance
*
* Types of methods used to authenticate to the alation instance
*
* Authentication type to connect to Apache Ranger.
*/
authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
/**
* Use slow logs to extract lineage.
*/
@ -1883,6 +1885,16 @@ export enum AuthProvider {
}
/**
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -1915,7 +1927,34 @@ export enum AuthProvider {
*
* Configuration for connecting to Ranger Basic Auth.
*/
export interface AuthConfigurationType {
export interface AuthenticationType {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
/**
* Password to connect to source.
*
@ -2851,7 +2890,7 @@ export interface ConfigConnection {
/**
* Choose Auth Config Type.
*/
authType?: ConnectionAuthConfigurationType;
authType?: AuthConfigurationType;
/**
* Custom OpenMetadata Classification name for Postgres policy tags.
*/
@ -2922,7 +2961,7 @@ export interface ConfigConnection {
*
* Azure Database Connection Config
*/
export interface ConnectionAuthConfigurationType {
export interface AuthConfigurationType {
/**
* Password to connect to source.
*/
@ -3339,7 +3378,7 @@ export interface HiveMetastoreConnectionDetails {
/**
* Choose Auth Config Type.
*/
authType?: ConnectionAuthConfigurationType;
authType?: AuthConfigurationType;
/**
* Custom OpenMetadata Classification name for Postgres policy tags.
*/

View File

@ -1364,6 +1364,18 @@ export interface ConfigObject {
* Establish secure connection with clickhouse
*/
secure?: boolean;
/**
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the tableau instance
*
* Types of methods used to authenticate to the alation instance
*
* Authentication type to connect to Apache Ranger.
*/
authType?: AuthenticationType | NoConfigAuthenticationTypes;
/**
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
* would like to restrict the metadata reading to a single catalog. When left blank,
@ -1446,16 +1458,6 @@ export interface ConfigObject {
* Authentication mode to connect to Impala.
*/
authMechanism?: AuthMechanismEnum;
/**
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the tableau instance
*
* Types of methods used to authenticate to the alation instance
*
* Authentication type to connect to Apache Ranger.
*/
authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
/**
* Use slow logs to extract lineage.
*/
@ -2372,6 +2374,16 @@ export enum AuthMechanismEnum {
}
/**
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -2404,7 +2416,34 @@ export enum AuthMechanismEnum {
*
* Configuration for connecting to Ranger Basic Auth.
*/
export interface AuthConfigurationType {
export interface AuthenticationType {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
/**
* Password to connect to source.
*
@ -3310,7 +3349,7 @@ export interface ConfigConnection {
/**
* Choose Auth Config Type.
*/
authType?: ConnectionAuthConfigurationType;
authType?: AuthConfigurationType;
/**
* Custom OpenMetadata Classification name for Postgres policy tags.
*/
@ -3381,7 +3420,7 @@ export interface ConfigConnection {
*
* Azure Database Connection Config
*/
export interface ConnectionAuthConfigurationType {
export interface AuthConfigurationType {
/**
* Password to connect to source.
*/
@ -3787,7 +3826,7 @@ export interface HiveMetastoreConnectionDetails {
/**
* Choose Auth Config Type.
*/
authType?: ConnectionAuthConfigurationType;
authType?: AuthConfigurationType;
/**
* Custom OpenMetadata Classification name for Postgres policy tags.
*/

View File

@ -0,0 +1,30 @@
/*
* Copyright 2025 Collate.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*/
export interface AzureAdSetup {
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId: string;
}

View File

@ -0,0 +1,28 @@
/*
* Copyright 2025 Collate.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*/
export interface DatabricksOAuth {
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret: string;
}

View File

@ -0,0 +1,22 @@
/*
* Copyright 2025 Collate.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Personal Access Token authentication for Databricks.
*/
export interface PersonalAccessToken {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token: string;
}

View File

@ -14,6 +14,10 @@
* Databricks Connection Config
*/
export interface DatabricksConnection {
/**
* Choose between different authentication types for Databricks.
*/
authType: AuthenticationType;
/**
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
* would like to restrict the metadata reading to a single catalog. When left blank,
@ -70,16 +74,53 @@ export interface DatabricksConnection {
* Regex to only include/exclude tables that matches the pattern.
*/
tableFilterPattern?: FilterPattern;
/**
* Generated Token to connect to Databricks.
*/
token: string;
/**
* Service Type
*/
type?: DatabricksType;
}
/**
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*/
export interface AuthenticationType {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
}
/**
* Regex to only include/exclude databases that matches the pattern.
*

View File

@ -762,6 +762,8 @@ export interface ConfigObject {
/**
* Types of methods used to authenticate to the tableau instance
*
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the alation instance
@ -1912,6 +1914,16 @@ export enum AuthProvider {
*
* Access Token Auth Credentials
*
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -1965,8 +1977,35 @@ export interface AuthenticationTypeForTableau {
* Personal Access Token Secret.
*/
personalAccessTokenSecret?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* JWT to connect to source.
*/

View File

@ -661,6 +661,12 @@ export interface ConfigObject {
* Establish secure connection with clickhouse
*/
secure?: boolean;
/**
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*/
authType?: AuthenticationType | NoConfigAuthenticationTypes;
/**
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
* would like to restrict the metadata reading to a single catalog. When left blank,
@ -685,10 +691,6 @@ export interface ConfigObject {
* Table name to fetch the query history.
*/
queryHistoryTable?: string;
/**
* Generated Token to connect to Databricks.
*/
token?: string;
/**
* CLI Driver version to connect to DB2. If not provided, the latest version will be used.
*/
@ -741,10 +743,6 @@ export interface ConfigObject {
* Authentication mode to connect to Impala.
*/
authMechanism?: AuthMechanismEnum;
/**
* Choose Auth Config Type.
*/
authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
/**
* Use slow logs to extract lineage.
*/
@ -908,6 +906,10 @@ export interface ConfigObject {
* Hostname of the Couchbase service.
*/
hostport?: string;
/**
* Generated Token to connect to Databricks.
*/
token?: string;
/**
* Enable dataflow for ingestion
*/
@ -1028,6 +1030,16 @@ export enum AuthMechanismEnum {
}
/**
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -1038,7 +1050,34 @@ export enum AuthMechanismEnum {
*
* Configuration for connecting to DataStax Astra DB in the cloud.
*/
export interface AuthConfigurationType {
export interface AuthenticationType {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
/**
* Password to connect to source.
*/
@ -1774,7 +1813,7 @@ export interface HiveMetastoreConnectionDetails {
/**
* Choose Auth Config Type.
*/
authType?: HiveMetastoreConnectionDetailsAuthConfigurationType;
authType?: AuthConfigurationType;
/**
* Custom OpenMetadata Classification name for Postgres policy tags.
*/
@ -1866,7 +1905,7 @@ export interface HiveMetastoreConnectionDetails {
*
* Azure Database Connection Config
*/
export interface HiveMetastoreConnectionDetailsAuthConfigurationType {
export interface AuthConfigurationType {
/**
* Password to connect to source.
*/

View File

@ -3473,6 +3473,8 @@ export interface ConfigObject {
/**
* Types of methods used to authenticate to the tableau instance
*
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the alation instance
@ -4560,6 +4562,16 @@ export enum AuthMechanismEnum {
*
* Access Token Auth Credentials
*
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -4613,8 +4625,35 @@ export interface AuthenticationTypeForTableau {
* Personal Access Token Secret.
*/
personalAccessTokenSecret?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* JWT to connect to source.
*/

View File

@ -806,6 +806,8 @@ export interface ConfigObject {
/**
* Types of methods used to authenticate to the tableau instance
*
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the alation instance
@ -1956,6 +1958,16 @@ export enum AuthProvider {
*
* Access Token Auth Credentials
*
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -2009,8 +2021,35 @@ export interface AuthenticationTypeForTableau {
* Personal Access Token Secret.
*/
personalAccessTokenSecret?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* JWT to connect to source.
*/

View File

@ -842,6 +842,8 @@ export interface ConfigObject {
/**
* Types of methods used to authenticate to the tableau instance
*
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the alation instance
@ -2013,6 +2015,16 @@ export enum AuthProvider {
*
* Access Token Auth Credentials
*
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -2066,8 +2078,35 @@ export interface AuthenticationTypeForTableau {
* Personal Access Token Secret.
*/
personalAccessTokenSecret?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* JWT to connect to source.
*/