mirror of
https://github.com/open-metadata/OpenMetadata.git
synced 2026-01-07 13:07:22 +00:00
* feat: databricks oauth and azure ad auth setup
* refactor: add auth type changes in databricks.md
* fix: test after oauth changes
* refactor: unity catalog connection to databricks connection code
* feat: added oauth and azure ad for unity catalog
* fix: unitycatalog tests, doc & required type in connection.json
* fix: generated tx files
* fix: exporter databricksConnection file
* refactor: unitycatalog example file
* fix: usage example files
* fix: unity catalog sqlalchemy connection
* fix: unity catalog client headers
* refactor: make common auth.py for dbx and unitycatalog
* fix: auth functions import
* fix: test unity catalog tags as None
* fix: type hinting and sql migration
* fix: migration for postgres
This commit is contained in:
parent
7ecdf586ea
commit
3d49b6689d
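
For orientation: the diff below moves the Databricks/Unity Catalog token out of the flat `connection.config.token` field and into a new `connection.config.authType` object that can hold one of three credential shapes. A minimal, hedged sketch of that reshaping (field names taken from the migrations and example files in this diff; the dicts are illustrative only, not the actual stored entities):

# Hedged sketch: old vs. new connection.config shape introduced by this commit.
old_config = {
    "hostPort": "localhost:443",
    "token": "<databricks token>",      # flat token field, removed by the migration below
}
new_config = {
    "hostPort": "localhost:443",
    "authType": {                        # oneOf: PersonalAccessToken | DatabricksOAuth | AzureADSetup
        "token": "<databricks token>",   # PAT variant; OAuth uses clientId/clientSecret,
    },                                   # Azure AD uses azureClientId/azureClientSecret/azureTenantId
}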
@@ -0,0 +1,15 @@
-- Migration script to restructure Databricks connection configuration
-- Move 'token' field from connection.config.token to connection.config.authType.token
UPDATE dbservice_entity
SET
    json = JSON_SET (
        JSON_REMOVE (json, '$.connection.config.token'),
        '$.connection.config.authType',
        JSON_OBJECT (
            'token',
            JSON_EXTRACT (json, '$.connection.config.token')
        )
    )
WHERE
    serviceType in ('Databricks', 'UnityCatalog')
    AND JSON_CONTAINS_PATH(json, 'one', '$.connection.config.token');
@@ -0,0 +1,12 @@
-- Migration script to restructure Databricks connection configuration
-- Move 'token' field from connection.config.token to connection.config.authType.token

UPDATE dbservice_entity
SET json = jsonb_set(
    json #- '{connection,config,token}',
    '{connection,config,authType}',
    jsonb_build_object('token', json #> '{connection,config,token}'),
    true
)
WHERE serviceType in ('Databricks', 'UnityCatalog')
AND jsonb_exists(json -> 'connection' -> 'config', 'token');
@@ -4,4 +4,4 @@
-- `profileData`field back to the original state.
UPDATE profiler_data_time_series
SET json = JSON_SET(json, '$.profileData', json->'$.profileData.profileData')
WHERE json->>'$.profileData.profileData' IS NOT NULL;
WHERE json->>'$.profileData.profileData' IS NOT NULL;

@@ -4,4 +4,4 @@
-- `profileData`field back to the original state.
UPDATE profiler_data_time_series
SET json = jsonb_set(json::jsonb, '{profileData}', json::jsonb->'profileData'->'profileData')::json
WHERE json->'profileData'->>'profileData' IS NOT NULL;
WHERE json->'profileData'->>'profileData' IS NOT NULL;
||||
@@ -5,7 +5,15 @@ source:
    config:
      catalog: hive_metastore
      databaseSchema: default
      token: <databricks token>

      authType:
        token: <databricks token>
        # clientId: databricks service principal client id
        # clientSecret: databricks service principal client secret
        # azureClientSecret: azure client secret
        # azureClientId: azure client id
        # azureTenantId: azure tenant id

      hostPort: localhost:443
      connectionTimeout: 120
      connectionArguments:

@@ -3,7 +3,13 @@ source:
  serviceName: local_databricks
  serviceConnection:
    config:
      token: <databricks token>
      authType:
        token: <databricks token>
        # clientId: databricks service principal client id
        # clientSecret: databricks service principal client secret
        # azureClientSecret: azure client secret
        # azureClientId: azure client id
        # azureTenantId: azure tenant id
      hostPort: localhost:443
      connectionArguments:
        http_path: <http path of databricks cluster>

@@ -6,7 +6,13 @@ source:
      type: UnityCatalog
      catalog: hive_metastore
      databaseSchema: default
      token: <databricks token>
      authType:
        token: <databricks token>
        # clientId: databricks service principal client id
        # clientSecret: databricks service principal client secret
        # azureClientSecret: azure client secret
        # azureClientId: azure client id
        # azureTenantId: azure tenant id
      hostPort: localhost:443
      connectionTimeout: 120
      connectionArguments:

@@ -6,7 +6,13 @@ source:
      type: UnityCatalog
      catalog: hive_metastore
      databaseSchema: default
      token: <databricks token>
      authType:
        token: <databricks token>
        # clientId: databricks service principal client id
        # clientSecret: databricks service principal client secret
        # azureClientSecret: azure client secret
        # azureClientId: azure client id
        # azureTenantId: azure tenant id
      hostPort: localhost:443
      connectionTimeout: 120
      connectionArguments:
||||
@@ -0,0 +1,99 @@
#  Copyright 2025 Collate
#  Licensed under the Collate Community License, Version 1.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#  https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/LICENSE
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.

"""
This module provides authentication utilities for Databricks and Unity Catalog connections.
"""
from typing import Union

from databricks.sdk.core import Config, azure_service_principal, oauth_service_principal

from metadata.generated.schema.entity.services.connections.database.databricks.azureAdSetup import (
    AzureAdSetup,
)
from metadata.generated.schema.entity.services.connections.database.databricks.databricksOAuth import (
    DatabricksOauth,
)
from metadata.generated.schema.entity.services.connections.database.databricks.personalAccessToken import (
    PersonalAccessToken,
)
from metadata.generated.schema.entity.services.connections.database.databricksConnection import (
    DatabricksConnection,
)
from metadata.generated.schema.entity.services.connections.database.unityCatalogConnection import (
    UnityCatalogConnection,
)


def get_personal_access_token_auth(
    connection: Union[DatabricksConnection, UnityCatalogConnection],
) -> dict:
    """
    Configure Personal Access Token authentication
    """
    return {"access_token": connection.authType.token.get_secret_value()}


def get_databricks_oauth_auth(
    connection: Union[DatabricksConnection, UnityCatalogConnection],
):
    """
    Create Databricks OAuth2 M2M credentials provider for Service Principal authentication
    """

    def credential_provider():
        hostname = connection.hostPort.split(":")[0]
        config = Config(
            host=f"https://{hostname}",
            client_id=connection.authType.clientId,
            client_secret=connection.authType.clientSecret.get_secret_value(),
        )
        return oauth_service_principal(config)

    return {"credentials_provider": credential_provider}


def get_azure_ad_auth(connection: Union[DatabricksConnection, UnityCatalogConnection]):
    """
    Create Azure AD credentials provider for Azure Service Principal authentication
    """

    def credential_provider():
        hostname = connection.hostPort.split(":")[0]
        config = Config(
            host=f"https://{hostname}",
            azure_client_secret=connection.authType.azureClientSecret.get_secret_value(),
            azure_client_id=connection.authType.azureClientId,
            azure_tenant_id=connection.authType.azureTenantId,
        )
        return azure_service_principal(config)

    return {"credentials_provider": credential_provider}


def get_auth_config(
    connection: Union[DatabricksConnection, UnityCatalogConnection],
) -> dict:
    """
    Get authentication configuration for Databricks connection
    """
    auth_method = {
        PersonalAccessToken: get_personal_access_token_auth,
        DatabricksOauth: get_databricks_oauth_auth,
        AzureAdSetup: get_azure_ad_auth,
    }.get(type(connection.authType))

    if not auth_method:
        raise ValueError(
            f"Unsupported authentication type: {type(connection.authType)}"
        )

    return auth_method(connection)
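
The helpers above return a small dict that the connection builders merge into the SQLAlchemy connect arguments. A rough usage sketch based on how `get_connection` consumes it later in this diff (assumes `connection` is an already-parsed DatabricksConnection with initialized `connectionArguments`):

from metadata.ingestion.source.database.databricks.auth import get_auth_config

# PersonalAccessToken yields {"access_token": "..."}; OAuth / Azure AD yield
# {"credentials_provider": <callable building a databricks credentials provider>}.
auth_args = get_auth_config(connection)
connection.connectionArguments.root.update(auth_args)  # merged into the engine's connect args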
@@ -61,14 +61,13 @@ class DatabricksClient:
    ):
        self.config = config
        base_url, *_ = self.config.hostPort.split(":")
        auth_token = self.config.token.get_secret_value()
        self.base_url = f"https://{base_url}{API_VERSION}"
        self.base_query_url = f"{self.base_url}{QUERIES_PATH}"
        self.base_job_url = f"https://{base_url}{JOB_API_VERSION}/jobs"
        self.jobs_list_url = f"{self.base_job_url}/list"
        self.jobs_run_list_url = f"{self.base_job_url}/runs/list"
        self.headers = {
            "Authorization": f"Bearer {auth_token}",
            **self._get_auth_header(),
            "Content-Type": "application/json",
        }
        self.api_timeout = self.config.connectionTimeout or 120
@@ -81,6 +80,12 @@ class DatabricksClient:
        self.engine = engine
        self.client = requests

    def _get_auth_header(self) -> dict[str, str]:
        """
        Method to get auth header
        """
        return {"Authorization": f"Bearer {self.config.token.get_secret_value()}"}

    def test_query_api_access(self) -> None:
        res = self.client.get(
            self.base_query_url, headers=self.headers, timeout=self.api_timeout
@@ -12,6 +12,7 @@
"""
Source connection handler
"""
from copy import deepcopy
from functools import partial
from typing import Optional

@@ -38,6 +39,7 @@ from metadata.ingestion.connections.test_connections import (
    test_connection_steps,
)
from metadata.ingestion.ometa.ometa_api import OpenMetadata
from metadata.ingestion.source.database.databricks.auth import get_auth_config
from metadata.ingestion.source.database.databricks.queries import (
    DATABRICKS_GET_CATALOGS,
    DATABRICKS_SQL_STATEMENT_TEST,
@@ -129,8 +131,7 @@ class DatabricksEngineWrapper:


def get_connection_url(connection: DatabricksConnection) -> str:
    url = f"{connection.scheme.value}://token:{connection.token.get_secret_value()}@{connection.hostPort}"
    return url
    return f"{connection.scheme.value}://{connection.hostPort}"


def get_connection(connection: DatabricksConnection) -> Engine:
@@ -138,17 +139,27 @@ def get_connection(connection: DatabricksConnection) -> Engine:
    Create connection
    """

    if not connection.connectionArguments:
        connection.connectionArguments = init_empty_connection_arguments()

    if connection.httpPath:
        if not connection.connectionArguments:
            connection.connectionArguments = init_empty_connection_arguments()
        connection.connectionArguments.root["http_path"] = connection.httpPath

    return create_generic_db_connection(
    auth_args = get_auth_config(connection)

    original_connection_arguments = connection.connectionArguments
    connection.connectionArguments = deepcopy(original_connection_arguments)
    connection.connectionArguments.root.update(auth_args)

    engine = create_generic_db_connection(
        connection=connection,
        get_connection_url_fn=get_connection_url,
        get_connection_args_fn=get_connection_args_common,
    )

    connection.connectionArguments = original_connection_arguments
    return engine


def test_connection(
    metadata: OpenMetadata,
@@ -16,6 +16,20 @@ import traceback

from requests import HTTPError

from metadata.generated.schema.entity.services.connections.database.databricks.azureAdSetup import (
    AzureAdSetup,
)
from metadata.generated.schema.entity.services.connections.database.databricks.databricksOAuth import (
    DatabricksOauth,
)
from metadata.generated.schema.entity.services.connections.database.databricks.personalAccessToken import (
    PersonalAccessToken,
)
from metadata.ingestion.source.database.databricks.auth import (
    get_azure_ad_auth,
    get_databricks_oauth_auth,
    get_personal_access_token_auth,
)
from metadata.ingestion.source.database.databricks.client import (
    API_TIMEOUT,
    DatabricksClient,
@@ -37,6 +51,26 @@ class UnityCatalogClient(DatabricksClient):
    UnityCatalogClient creates a Databricks connection based on DatabricksCredentials.
    """

    def _get_auth_header(self) -> dict[str, str]:
        """
        Method to get auth header
        """
        auth_method = {
            PersonalAccessToken: get_personal_access_token_auth,
            DatabricksOauth: get_databricks_oauth_auth,
            AzureAdSetup: get_azure_ad_auth,
        }.get(type(self.config.authType))
        if not auth_method:
            raise ValueError(
                f"Unsupported authentication type: {type(self.config.authType)}"
            )

        auth_args = auth_method(self.config)
        if auth_args.get("access_token"):
            return {"Authorization": f"Bearer {auth_args['access_token']}"}

        return auth_args["credentials_provider"]()()

    def get_table_lineage(self, table_name: str) -> LineageTableStreams:
        """
        Method returns table lineage details
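
Note on the double call in `_get_auth_header`: for the OAuth and Azure AD paths, `auth_method(self.config)` returns a dict whose `credentials_provider` is a zero-argument callable that builds the databricks-sdk credentials object, and calling that result again produces the header dict. A hedged illustration of the shape this relies on (names mirror auth.py above; the exact SDK return type is an assumption):

provider = auth_args["credentials_provider"]   # inner callable defined in auth.py
header_factory = provider()                    # e.g. oauth_service_principal(Config(...))
headers = header_factory()                     # dict such as {"Authorization": "Bearer ..."}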
@@ -12,6 +12,7 @@
"""
Source connection handler
"""
from copy import deepcopy
from functools import partial
from typing import Optional

@@ -22,6 +23,15 @@ from sqlalchemy.exc import DatabaseError
from metadata.generated.schema.entity.automations.workflow import (
    Workflow as AutomationWorkflow,
)
from metadata.generated.schema.entity.services.connections.database.databricks.azureAdSetup import (
    AzureAdSetup,
)
from metadata.generated.schema.entity.services.connections.database.databricks.databricksOAuth import (
    DatabricksOauth,
)
from metadata.generated.schema.entity.services.connections.database.databricks.personalAccessToken import (
    PersonalAccessToken,
)
from metadata.generated.schema.entity.services.connections.database.unityCatalogConnection import (
    UnityCatalogConnection,
)
@@ -35,6 +45,7 @@ from metadata.ingestion.connections.builders import (
)
from metadata.ingestion.connections.test_connections import test_connection_steps
from metadata.ingestion.ometa.ometa_api import OpenMetadata
from metadata.ingestion.source.database.databricks.auth import get_auth_config
from metadata.ingestion.source.database.unitycatalog.models import DatabricksTable
from metadata.ingestion.source.database.unitycatalog.queries import (
    UNITY_CATALOG_GET_ALL_SCHEMA_TAGS,
@@ -51,7 +62,7 @@ logger = ingestion_logger()


def get_connection_url(connection: UnityCatalogConnection) -> str:
    url = f"{connection.scheme.value}://token:{connection.token.get_secret_value()}@{connection.hostPort}"
    url = f"{connection.scheme.value}://{connection.hostPort}"
    return url


@@ -59,10 +70,23 @@ def get_connection(connection: UnityCatalogConnection) -> WorkspaceClient:
    """
    Create connection
    """
    client_params = {}
    if isinstance(connection.authType, PersonalAccessToken):
        client_params["token"] = connection.authType.token.get_secret_value()
    elif isinstance(connection.authType, DatabricksOauth):
        client_params["client_id"] = connection.authType.clientId
        client_params[
            "client_secret"
        ] = connection.authType.clientSecret.get_secret_value()
    elif isinstance(connection.authType, AzureAdSetup):
        client_params["azure_client_id"] = connection.authType.azureClientId
        client_params[
            "azure_client_secret"
        ] = connection.authType.azureClientSecret.get_secret_value()
        client_params["azure_tenant_id"] = connection.authType.azureTenantId

    return WorkspaceClient(
        host=get_host_from_host_port(connection.hostPort),
        token=connection.token.get_secret_value(),
        host=get_host_from_host_port(connection.hostPort), **client_params
    )


@@ -76,11 +100,19 @@ def get_sqlalchemy_connection(connection: UnityCatalogConnection) -> Engine:
        connection.connectionArguments = init_empty_connection_arguments()
        connection.connectionArguments.root["http_path"] = connection.httpPath

    return create_generic_db_connection(
    auth_args = get_auth_config(connection)

    original_connection_arguments = connection.connectionArguments
    connection.connectionArguments = deepcopy(original_connection_arguments)
    connection.connectionArguments.root.update(auth_args)

    engine = create_generic_db_connection(
        connection=connection,
        get_connection_url_fn=get_connection_url,
        get_connection_args_fn=get_connection_args_common,
    )
    connection.connectionArguments = original_connection_arguments
    return engine


def test_connection(
@@ -46,6 +46,11 @@ def _(*_, **__):
    return "SELECT SESSION_USER()"


@compiles(ConnTestFn, Dialects.Databricks)
def _(*_, **__):
    return "SELECT '42'"


@compiles(ConnTestFn, Dialects.Db2)
@compiles(ConnTestFn, Dialects.IbmDbSa)
@compiles(ConnTestFn, Dialects.Ibmi)
@@ -85,7 +85,7 @@ mock_databricks_config = {
    "serviceName": "local_databricks1",
    "serviceConnection": {
        "config": {
            "token": "random_token",
            "authType": {"token": "random_token"},
            "hostPort": "localhost:443",
            "httpPath": "sql/1.0/endpoints/path",
            "connectionArguments": {

@@ -21,6 +21,9 @@ from sqlalchemy.orm import declarative_base

from metadata.generated.schema.entity.data.table import Column as EntityColumn
from metadata.generated.schema.entity.data.table import ColumnName, DataType, Table
from metadata.generated.schema.entity.services.connections.database.databricks.personalAccessToken import (
    PersonalAccessToken,
)
from metadata.generated.schema.entity.services.connections.database.unityCatalogConnection import (
    UnityCatalogConnection,
)
@@ -67,7 +70,7 @@ class UnityCatalogSamplerTest(TestCase):

        self.unity_catalog_conn = UnityCatalogConnection(
            hostPort="localhost:443",
            token="test_token",
            authType=PersonalAccessToken(token="test_token"),
            httpPath="/sql/1.0/warehouses/test",
            catalog="test_catalog",
        )
@@ -30,6 +30,12 @@ from metadata.generated.schema.entity.services.connections.database.common.basic
from metadata.generated.schema.entity.services.connections.database.common.jwtAuth import (
    JwtAuth,
)
from metadata.generated.schema.entity.services.connections.database.databricks.databricksOAuth import (
    DatabricksOauth,
)
from metadata.generated.schema.entity.services.connections.database.databricks.personalAccessToken import (
    PersonalAccessToken,
)
from metadata.generated.schema.entity.services.connections.database.databricksConnection import (
    DatabricksConnection,
    DatabricksScheme,
@@ -130,13 +136,11 @@ class SourceConnectionTest(TestCase):
            get_connection_url,
        )

        expected_result = (
            "databricks+connector://token:KlivDTACWXKmZVfN1qIM@1.1.1.1:443"
        )
        expected_result = "databricks+connector://1.1.1.1:443"
        databricks_conn_obj = DatabricksConnection(
            scheme=DatabricksScheme.databricks_connector,
            hostPort="1.1.1.1:443",
            token="KlivDTACWXKmZVfN1qIM",
            authType=PersonalAccessToken(token="KlivDTACWXKmZVfN1qIM"),
            httpPath="/sql/1.0/warehouses/abcdedfg",
        )
        assert expected_result == get_connection_url(databricks_conn_obj)
@@ -146,14 +150,16 @@ class SourceConnectionTest(TestCase):
            get_connection_url,
        )

        expected_result = (
            "databricks+connector://token:KlivDTACWXKmZVfN1qIM@1.1.1.1:443"
        )
        expected_result = "databricks+connector://1.1.1.1:443"
        databricks_conn_obj = DatabricksConnection(
            scheme=DatabricksScheme.databricks_connector,
            hostPort="1.1.1.1:443",
            token="KlivDTACWXKmZVfN1qIM",
            authType=DatabricksOauth(
                clientId="d40e2905-88ef-42ab-8898-fbefff2d071d",
                clientSecret="secret-value",
            ),
            httpPath="/sql/1.0/warehouses/abcdedfg",
            catalog="main",
        )
        assert expected_result == get_connection_url(databricks_conn_obj)
@@ -235,7 +235,11 @@ def test_databricks():
        "serviceName": "local_databricks",
        "serviceConnection": {
            "config": {
                "token": "<databricks token>",
                "authType": {
                    "azureClientId": "3df43ed7-5f2f-46bb-9793-384c6374a81d",
                    "azureClientSecret": "secret-value",
                    "azureTenantId": "3df43ed7-5g1f-46bb-9793-384c6374a81d",
                },
                "hostPort": "localhost:443",
                "httpPath": "<http path of databricks cluster>",
                "connectionArguments": {
@@ -23,6 +23,9 @@ from metadata.generated.schema.api.data.createTable import CreateTableRequest
from metadata.generated.schema.entity.data.database import Database
from metadata.generated.schema.entity.data.databaseSchema import DatabaseSchema
from metadata.generated.schema.entity.data.table import Column, DataType, TableType
from metadata.generated.schema.entity.services.connections.database.databricks.personalAccessToken import (
    PersonalAccessToken,
)
from metadata.generated.schema.entity.services.databaseService import (
    DatabaseConnection,
    DatabaseService,
@@ -45,7 +48,9 @@ mock_databricks_config = {
            "type": "Databricks",
            "catalog": "hive_metastore",
            "databaseSchema": "default",
            "token": "123sawdtesttoken",
            "authType": {
                "token": "123sawdtesttoken",
            },
            "hostPort": "localhost:443",
            "httpPath": "/sql/1.0/warehouses/abcdedfg",
            "connectionArguments": {"http_path": "/sql/1.0/warehouses/abcdedfg"},
@@ -397,12 +402,12 @@ class DatabricksConnectionTest(TestCase):
        connection = self.DatabricksConnection(
            scheme=self.DatabricksScheme.databricks_connector,
            hostPort="test-host:443",
            token="test-token",
            authType=PersonalAccessToken(token="test-token"),
            httpPath="/sql/1.0/warehouses/test",
        )

        url = self.get_connection_url(connection)
        expected_url = "databricks+connector://token:test-token@test-host:443"
        expected_url = "databricks+connector://test-host:443"
        self.assertEqual(url, expected_url)

    @patch(
@@ -413,7 +418,7 @@ class DatabricksConnectionTest(TestCase):
        connection = self.DatabricksConnection(
            scheme=self.DatabricksScheme.databricks_connector,
            hostPort="test-host:443",
            token="test-token",
            authType=PersonalAccessToken(token="test-token"),
            httpPath="/sql/1.0/warehouses/test",
        )

@@ -769,7 +774,7 @@ class DatabricksConnectionTest(TestCase):
        service_connection = DatabricksConnection(
            scheme=DatabricksScheme.databricks_connector,
            hostPort="test-host:443",
            token="test-token",
            authType=PersonalAccessToken(token="test-token"),
            httpPath="/sql/1.0/warehouses/test",
            queryHistoryTable="test_table",
        )
@@ -69,7 +69,7 @@ mock_unitycatalog_config = {
            "type": "UnityCatalog",
            "catalog": "hive_metastore",
            "databaseSchema": "default",
            "token": "123sawdtesttoken",
            "authType": {"token": "123sawdtesttoken"},
            "hostPort": "localhost:443",
            "httpPath": "/sql/1.0/warehouses/abcdedfg",
            "connectionTimeout": 120,
@@ -483,6 +483,7 @@ EXPTECTED_TABLE = [
        dataType=DataType.INT,
        dataLength=1,
        dataTypeDisplay="int",
        tags=None,
    ),
    Column(
        name=ColumnName(root="array_data"),
@@ -490,18 +491,21 @@ EXPTECTED_TABLE = [
        arrayDataType=DataType.INT,
        dataLength=1,
        dataTypeDisplay="array<int>",
        tags=None,
    ),
    Column(
        name=ColumnName(root="map_data"),
        dataType=DataType.MAP,
        dataLength=1,
        dataTypeDisplay="map<string,int>",
        tags=None,
    ),
    Column(
        name=ColumnName(root="struct_data"),
        dataType=DataType.STRUCT,
        dataLength=1,
        dataTypeDisplay="struct<a:int,b:string,c:array<string>,d:struct<abc:int>>",
        tags=None,
        children=[
            Column(
                name=ColumnName(root="a"),
@@ -30,6 +30,7 @@ import org.openmetadata.schema.services.connections.database.BigQueryConnection;
import org.openmetadata.schema.services.connections.database.BigTableConnection;
import org.openmetadata.schema.services.connections.database.CassandraConnection;
import org.openmetadata.schema.services.connections.database.CockroachConnection;
import org.openmetadata.schema.services.connections.database.DatabricksConnection;
import org.openmetadata.schema.services.connections.database.DatalakeConnection;
import org.openmetadata.schema.services.connections.database.DeltaLakeConnection;
import org.openmetadata.schema.services.connections.database.GreenplumConnection;
@@ -41,6 +42,7 @@ import org.openmetadata.schema.services.connections.database.RedshiftConnection;
import org.openmetadata.schema.services.connections.database.SalesforceConnection;
import org.openmetadata.schema.services.connections.database.SapHanaConnection;
import org.openmetadata.schema.services.connections.database.TrinoConnection;
import org.openmetadata.schema.services.connections.database.UnityCatalogConnection;
import org.openmetadata.schema.services.connections.database.datalake.GCSConfig;
import org.openmetadata.schema.services.connections.database.deltalake.StorageConfig;
import org.openmetadata.schema.services.connections.database.iceberg.IcebergFileSystem;
@@ -104,6 +106,8 @@ public final class ClassConverterFactory {
          Map.entry(MatillionConnection.class, new MatillionConnectionClassConverter()),
          Map.entry(VertexAIConnection.class, new VertexAIConnectionClassConverter()),
          Map.entry(RangerConnection.class, new RangerConnectionClassConverter()),
          Map.entry(DatabricksConnection.class, new DatabricksConnectionClassConverter()),
          Map.entry(UnityCatalogConnection.class, new UnityCatalogConnectionClassConverter()),
          Map.entry(CassandraConnection.class, new CassandraConnectionClassConverter()),
          Map.entry(SSISConnection.class, new SsisConnectionClassConverter()),
          Map.entry(WherescapeConnection.class, new WherescapeConnectionClassConverter()));
@@ -0,0 +1,43 @@
/*
 *  Copyright 2021 Collate
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *  http://www.apache.org/licenses/LICENSE-2.0
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package org.openmetadata.service.secrets.converter;

import java.util.List;
import org.openmetadata.schema.services.connections.database.DatabricksConnection;
import org.openmetadata.schema.services.connections.database.databricks.AzureADSetup;
import org.openmetadata.schema.services.connections.database.databricks.DatabricksOAuth;
import org.openmetadata.schema.services.connections.database.databricks.PersonalAccessToken;
import org.openmetadata.schema.utils.JsonUtils;

/** Converter class to get a `DatabricksConnection` object. */
public class DatabricksConnectionClassConverter extends ClassConverter {

  private static final List<Class<?>> CONFIG_SOURCE_CLASSES =
      List.of(PersonalAccessToken.class, DatabricksOAuth.class, AzureADSetup.class);

  public DatabricksConnectionClassConverter() {
    super(DatabricksConnection.class);
  }

  @Override
  public Object convert(Object object) {
    DatabricksConnection databricksConnection =
        (DatabricksConnection) JsonUtils.convertValue(object, this.clazz);

    tryToConvert(databricksConnection.getAuthType(), CONFIG_SOURCE_CLASSES)
        .ifPresent(databricksConnection::setAuthType);

    return databricksConnection;
  }
}
@@ -0,0 +1,43 @@
/*
 *  Copyright 2021 Collate
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *  http://www.apache.org/licenses/LICENSE-2.0
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package org.openmetadata.service.secrets.converter;

import java.util.List;
import org.openmetadata.schema.services.connections.database.UnityCatalogConnection;
import org.openmetadata.schema.services.connections.database.databricks.AzureADSetup;
import org.openmetadata.schema.services.connections.database.databricks.DatabricksOAuth;
import org.openmetadata.schema.services.connections.database.databricks.PersonalAccessToken;
import org.openmetadata.schema.utils.JsonUtils;

/** Converter class to get a `UnityCatalogConnection` object. */
public class UnityCatalogConnectionClassConverter extends ClassConverter {

  private static final List<Class<?>> CONFIG_SOURCE_CLASSES =
      List.of(PersonalAccessToken.class, DatabricksOAuth.class, AzureADSetup.class);

  public UnityCatalogConnectionClassConverter() {
    super(UnityCatalogConnection.class);
  }

  @Override
  public Object convert(Object object) {
    UnityCatalogConnection unityCatalogConnection =
        (UnityCatalogConnection) JsonUtils.convertValue(object, this.clazz);

    tryToConvert(unityCatalogConnection.getAuthType(), CONFIG_SOURCE_CLASSES)
        .ifPresent(unityCatalogConnection::setAuthType);

    return unityCatalogConnection;
  }
}
@@ -40,11 +40,23 @@
      "description": "Host and port of the Databricks service.",
      "type": "string"
    },
    "token": {
      "title": "Token",
      "description": "Generated Token to connect to Databricks.",
      "type": "string",
      "format": "password"
    "authType": {
      "title": "Authentication Type",
      "description": "Choose between different authentication types for Databricks.",
      "oneOf": [
        {
          "title": "Personal Access Token",
          "$ref": "../../../../services/connections/database/databricks/personalAccessToken.json"
        },
        {
          "title": "Databricks OAuth",
          "$ref": "../../../../services/connections/database/databricks/databricksOAuth.json"
        },
        {
          "title": "Azure AD Setup",
          "$ref": "../../../../services/connections/database/databricks/azureAdSetup.json"
        }
      ]
    },
    "httpPath": {
      "title": "Http Path",
@@ -79,7 +91,7 @@
  "additionalProperties": false,
  "required": [
    "hostPort",
    "token",
    "authType",
    "httpPath"
  ]
}
@@ -0,0 +1,32 @@
{
  "$id": "https://open-metadata.org/schema/entity/services/connections/database/databricks/azureAdSetup.json",
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "Azure AD Setup",
  "description": "Azure Active Directory authentication for Azure Databricks workspaces using Service Principal.",
  "javaType": "org.openmetadata.schema.services.connections.database.databricks.AzureADSetup",
  "type": "object",
  "properties": {
    "azureClientId": {
      "title": "Azure Client ID",
      "description": "Azure Service Principal Application (client) ID registered in your Azure Active Directory.",
      "type": "string"
    },
    "azureClientSecret": {
      "title": "Azure Client Secret",
      "description": "Azure Service Principal client secret created in Azure AD for authentication.",
      "type": "string",
      "format": "password"
    },
    "azureTenantId": {
      "title": "Azure Tenant ID",
      "description": "Azure Active Directory Tenant ID where your Service Principal is registered.",
      "type": "string"
    }
  },
  "additionalProperties": false,
  "required": [
    "azureClientId",
    "azureClientSecret",
    "azureTenantId"
  ]
}
@@ -0,0 +1,26 @@
{
  "$id": "https://open-metadata.org/schema/entity/services/connections/database/databricks/databricksOAuth.json",
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "Databricks OAuth",
  "description": "OAuth2 Machine-to-Machine authentication using Service Principal credentials for Databricks.",
  "javaType": "org.openmetadata.schema.services.connections.database.databricks.DatabricksOAuth",
  "type": "object",
  "properties": {
    "clientId": {
      "title": "Client ID",
      "description": "Service Principal Application ID created in your Databricks Account Console for OAuth Machine-to-Machine authentication.",
      "type": "string"
    },
    "clientSecret": {
      "title": "Client Secret",
      "description": "OAuth Secret generated for the Service Principal in Databricks Account Console. Used for secure OAuth2 authentication.",
      "type": "string",
      "format": "password"
    }
  },
  "additionalProperties": false,
  "required": [
    "clientId",
    "clientSecret"
  ]
}
@@ -0,0 +1,20 @@
{
  "$id": "https://open-metadata.org/schema/entity/services/connections/database/databricks/personalAccessToken.json",
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "Personal Access Token",
  "description": "Personal Access Token authentication for Databricks.",
  "javaType": "org.openmetadata.schema.services.connections.database.databricks.PersonalAccessToken",
  "type": "object",
  "properties": {
    "token": {
      "title": "Token",
      "description": "Generated Personal Access Token for Databricks workspace authentication. This token is created from User Settings -> Developer -> Access Tokens in your Databricks workspace.",
      "type": "string",
      "format": "password"
    }
  },
  "additionalProperties": false,
  "required": [
    "token"
  ]
}
@@ -41,11 +41,23 @@
      "description": "Host and port of the Databricks service.",
      "type": "string"
    },
    "token": {
      "title": "Token",
      "description": "Generated Token to connect to Databricks.",
      "type": "string",
      "format": "password"
    "authType": {
      "title": "Authentication Type",
      "description": "Choose between different authentication types for Databricks.",
      "oneOf": [
        {
          "title": "Personal Access Token",
          "$ref": "./databricks/personalAccessToken.json"
        },
        {
          "title": "Databricks OAuth",
          "$ref": "./databricks/databricksOAuth.json"
        },
        {
          "title": "Azure AD Setup",
          "$ref": "./databricks/azureAdSetup.json"
        }
      ]
    },
    "httpPath": {
      "title": "Http Path",
@@ -142,7 +154,7 @@
  "additionalProperties": false,
  "required": [
    "hostPort",
    "token",
    "httpPath"
    "httpPath",
    "authType"
  ]
}
@@ -9,13 +9,17 @@
    "databricksType": {
      "description": "Service type.",
      "type": "string",
      "enum": ["UnityCatalog"],
      "enum": [
        "UnityCatalog"
      ],
      "default": "UnityCatalog"
    },
    "databricksScheme": {
      "description": "SQLAlchemy driver scheme options.",
      "type": "string",
      "enum": ["databricks+connector"],
      "enum": [
        "databricks+connector"
      ],
      "default": "databricks+connector"
    }
  },
@@ -37,11 +41,23 @@
      "description": "Host and port of the Databricks service.",
      "type": "string"
    },
    "token": {
      "title": "Token",
      "description": "Generated Token to connect to Databricks.",
      "type": "string",
      "format": "password"
    "authType": {
      "title": "Authentication Type",
      "description": "Choose between different authentication types for Databricks.",
      "oneOf": [
        {
          "title": "Personal Access Token",
          "$ref": "./databricks/personalAccessToken.json"
        },
        {
          "title": "Databricks OAuth",
          "$ref": "./databricks/databricksOAuth.json"
        },
        {
          "title": "Azure AD Setup",
          "$ref": "./databricks/azureAdSetup.json"
        }
      ]
    },
    "httpPath": {
      "title": "Http Path",
@@ -78,7 +94,9 @@
      "$ref": "../../../../type/filterPattern.json#/definitions/filterPattern",
      "default": {
        "includes": [],
        "excludes": ["^information_schema$"]
        "excludes": [
          "^information_schema$"
        ]
      }
    },
    "tableFilterPattern": {
@@ -92,7 +110,9 @@
      "$ref": "../../../../type/filterPattern.json#/definitions/filterPattern",
      "default": {
        "includes": [],
        "excludes": ["^system$"]
        "excludes": [
          "^system$"
        ]
      }
    },
    "supportsUsageExtraction": {
@@ -126,5 +146,8 @@
    }
  },
  "additionalProperties": false,
  "required": ["hostPort", "token"]
}
  "required": [
    "hostPort",
    "authType"
  ]
}
@@ -32,9 +32,49 @@ This parameter specifies the host and port of the Databricks instance. This shou
If you are running the OpenMetadata ingestion in a docker and your services are hosted on the `localhost`, then use `host.docker.internal:3000` as the value.
$$

$$section
### Authentication Type $(id="authType")
Select the authentication method to connect to your Databricks workspace.

- **Personal Access Token**: Generated Personal Access Token for Databricks workspace authentication.

- **Databricks OAuth**: OAuth2 Machine-to-Machine authentication using a Service Principal.

- **Azure AD Setup**: Specifically for Azure Databricks workspaces that use Azure Active Directory for identity management. Uses Azure Service Principal authentication through Azure AD.
$$

$$section
### Token $(id="token")
Generated Token to connect to Databricks. E.g., `dapw488e89a7176f7eb39bbc718617891564`.
Personal Access Token (PAT) for authenticating with Databricks workspace.
(e.g., `dapi1234567890abcdef`)
$$

$$section
### Client ID $(id="clientId")
The Application ID of your Databricks Service Principal for OAuth2 authentication.
(e.g., `12345678-1234-1234-1234-123456789abc`)
$$

$$section
### Client Secret $(id="clientSecret")
OAuth secret for the Databricks Service Principal.
$$

$$section
### Azure Client ID $(id="azureClientId")
Azure Active Directory Application (client) ID for Azure Databricks authentication.
(e.g., `a1b2c3d4-e5f6-7890-abcd-ef1234567890`)
$$

$$section
### Azure Client Secret $(id="azureClientSecret")
Secret key for the Azure AD Application.
$$

$$section
### Azure Tenant ID $(id="azureTenantId")
Your Azure Active Directory tenant identifier.
(e.g., `98765432-dcba-4321-abcd-1234567890ab`)
$$

$$section
@@ -33,10 +33,51 @@ If you are running the OpenMetadata ingestion in a docker and your services are
$$

$$section
### Token $(id="token")
Generated Token to connect to Databricks. E.g., `dapw488e89a7176f7eb39bbc718617891564`.
### Authentication Type $(id="authType")
Select the authentication method to connect to your Databricks workspace.

- **Personal Access Token**: Generated Personal Access Token for Databricks workspace authentication.

- **Databricks OAuth**: OAuth2 Machine-to-Machine authentication using a Service Principal.

- **Azure AD Setup**: Specifically for Azure Databricks workspaces that use Azure Active Directory for identity management. Uses Azure Service Principal authentication through Azure AD.
$$

$$section
### Token $(id="token")
Personal Access Token (PAT) for authenticating with Databricks workspace.
(e.g., `dapi1234567890abcdef`)
$$

$$section
### Client ID $(id="clientId")
The Application ID of your Databricks Service Principal for OAuth2 authentication.
(e.g., `12345678-1234-1234-1234-123456789abc`)
$$

$$section
### Client Secret $(id="clientSecret")
OAuth secret for the Databricks Service Principal.
$$

$$section
### Azure Client ID $(id="azureClientId")
Azure Active Directory Application (client) ID for Azure Databricks authentication.
(e.g., `a1b2c3d4-e5f6-7890-abcd-ef1234567890`)
$$

$$section
### Azure Client Secret $(id="azureClientSecret")
Secret key for the Azure AD Application.
$$

$$section
### Azure Tenant ID $(id="azureTenantId")
Your Azure Active Directory tenant identifier.
(e.g., `98765432-dcba-4321-abcd-1234567890ab`)
$$


$$section
### HTTP Path $(id="httpPath")
Databricks compute resources URL. E.g., `/sql/1.0/warehouses/xyz123`.
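
Taken together, the two documentation files above describe three mutually exclusive `authType` shapes. A hedged summary of what each variant carries, using the example values from the docs themselves (placeholders, not real credentials):

auth_type_examples = {
    "personal_access_token": {"token": "dapi1234567890abcdef"},
    "databricks_oauth": {
        "clientId": "12345678-1234-1234-1234-123456789abc",
        "clientSecret": "<service principal oauth secret>",
    },
    "azure_ad": {
        "azureClientId": "a1b2c3d4-e5f6-7890-abcd-ef1234567890",
        "azureClientSecret": "<azure client secret>",
        "azureTenantId": "98765432-dcba-4321-abcd-1234567890ab",
    },
}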
@@ -437,12 +437,12 @@ export interface ConfigObject {
    /**
     * Generated Token to connect to OpenAPI Schema.
     *
     * Generated Token to connect to Databricks.
     *
     * token to connect to Qlik Cloud.
     *
     * To Connect to Dagster Cloud
     *
     * Generated Token to connect to Databricks.
     *
     * Generated Token to connect to DBTCloud.
     *
     * Token to connect to Stitch api doc
@@ -943,6 +943,18 @@ export interface ConfigObject {
     * Establish secure connection with clickhouse
     */
    secure?: boolean;
    /**
     * Choose between different authentication types for Databricks.
     *
     * Choose Auth Config Type.
     *
     * Types of methods used to authenticate to the tableau instance
     *
     * Types of methods used to authenticate to the alation instance
     *
     * Authentication type to connect to Apache Ranger.
     */
    authType?: AuthenticationType | NoConfigAuthenticationTypes;
    /**
     * Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
     * would like to restrict the metadata reading to a single catalog. When left blank,
@@ -1025,16 +1037,6 @@ export interface ConfigObject {
     * Authentication mode to connect to Impala.
     */
    authMechanism?: AuthMechanismEnum;
    /**
     * Choose Auth Config Type.
     *
     * Types of methods used to authenticate to the tableau instance
     *
     * Types of methods used to authenticate to the alation instance
     *
     * Authentication type to connect to Apache Ranger.
     */
    authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
    /**
     * Use slow logs to extract lineage.
     */
@@ -2071,6 +2073,16 @@ export enum AuthProvider {
}

/**
 * Choose between different authentication types for Databricks.
 *
 * Personal Access Token authentication for Databricks.
 *
 * OAuth2 Machine-to-Machine authentication using Service Principal credentials for
 * Databricks.
 *
 * Azure Active Directory authentication for Azure Databricks workspaces using Service
 * Principal.
 *
 * Choose Auth Config Type.
 *
 * Common Database Connection Config
@@ -2103,7 +2115,34 @@ export enum AuthProvider {
 *
 * Configuration for connecting to Ranger Basic Auth.
 */
export interface AuthConfigurationType {
export interface AuthenticationType {
    /**
     * Generated Personal Access Token for Databricks workspace authentication. This token is
     * created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
     */
    token?: string;
    /**
     * Service Principal Application ID created in your Databricks Account Console for OAuth
     * Machine-to-Machine authentication.
     */
    clientId?: string;
    /**
     * OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
     * secure OAuth2 authentication.
     */
    clientSecret?: string;
    /**
     * Azure Service Principal Application (client) ID registered in your Azure Active Directory.
     */
    azureClientId?: string;
    /**
     * Azure Service Principal client secret created in Azure AD for authentication.
     */
    azureClientSecret?: string;
    /**
     * Azure Active Directory Tenant ID where your Service Principal is registered.
     */
    azureTenantId?: string;
    /**
     * Password to connect to source.
     *
@@ -3041,7 +3080,7 @@ export interface ConfigConnection {
    /**
     * Choose Auth Config Type.
     */
    authType?: ConnectionAuthConfigurationType;
    authType?: AuthConfigurationType;
    /**
     * Custom OpenMetadata Classification name for Postgres policy tags.
     */
@@ -3112,7 +3151,7 @@ export interface ConfigConnection {
 *
 * Azure Database Connection Config
 */
export interface ConnectionAuthConfigurationType {
export interface AuthConfigurationType {
    /**
     * Password to connect to source.
     */
@@ -3531,7 +3570,7 @@ export interface HiveMetastoreConnectionDetails {
    /**
     * Choose Auth Config Type.
     */
    authType?: ConnectionAuthConfigurationType;
    authType?: AuthConfigurationType;
    /**
     * Custom OpenMetadata Classification name for Postgres policy tags.
     */
@@ -546,6 +546,12 @@ export interface ConfigObject {
     * Establish secure connection with clickhouse
     */
    secure?: boolean;
    /**
     * Choose between different authentication types for Databricks.
     *
     * Choose Auth Config Type.
     */
    authType?: AuthenticationType | NoConfigAuthenticationTypes;
    /**
     * Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
     * would like to restrict the metadata reading to a single catalog. When left blank,
@@ -570,10 +576,6 @@ export interface ConfigObject {
     * Table name to fetch the query history.
     */
    queryHistoryTable?: string;
    /**
     * Generated Token to connect to Databricks.
     */
    token?: string;
    /**
     * CLI Driver version to connect to DB2. If not provided, the latest version will be used.
     */
@@ -626,10 +628,6 @@ export interface ConfigObject {
     * Authentication mode to connect to Impala.
     */
    authMechanism?: AuthMechanismEnum;
    /**
     * Choose Auth Config Type.
     */
    authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
    /**
     * Use slow logs to extract lineage.
     */
@@ -913,6 +911,16 @@ export enum AuthMechanismEnum {
}

/**
 * Choose between different authentication types for Databricks.
 *
 * Personal Access Token authentication for Databricks.
 *
 * OAuth2 Machine-to-Machine authentication using Service Principal credentials for
 * Databricks.
 *
 * Azure Active Directory authentication for Azure Databricks workspaces using Service
 * Principal.
 *
 * Choose Auth Config Type.
 *
 * Common Database Connection Config
@@ -923,7 +931,34 @@ export enum AuthMechanismEnum {
 *
 * Configuration for connecting to DataStax Astra DB in the cloud.
 */
export interface AuthConfigurationType {
export interface AuthenticationType {
    /**
     * Generated Personal Access Token for Databricks workspace authentication. This token is
     * created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
     */
    token?: string;
    /**
     * Service Principal Application ID created in your Databricks Account Console for OAuth
     * Machine-to-Machine authentication.
     */
    clientId?: string;
    /**
     * OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
     * secure OAuth2 authentication.
     */
    clientSecret?: string;
    /**
     * Azure Service Principal Application (client) ID registered in your Azure Active Directory.
     */
    azureClientId?: string;
    /**
     * Azure Service Principal client secret created in Azure AD for authentication.
     */
    azureClientSecret?: string;
    /**
     * Azure Active Directory Tenant ID where your Service Principal is registered.
     */
    azureTenantId?: string;
    /**
     * Password to connect to source.
     */
@@ -1659,7 +1694,7 @@ export interface HiveMetastoreConnectionDetails {
    /**
     * Choose Auth Config Type.
     */
    authType?: HiveMetastoreConnectionDetailsAuthConfigurationType;
    authType?: AuthConfigurationType;
    /**
     * Custom OpenMetadata Classification name for Postgres policy tags.
     */
@@ -1751,7 +1786,7 @@ export interface HiveMetastoreConnectionDetails {
 *
 * Azure Database Connection Config
 */
export interface HiveMetastoreConnectionDetailsAuthConfigurationType {
export interface AuthConfigurationType {
    /**
     * Password to connect to source.
     */
@@ -2512,10 +2512,10 @@ export interface ConfigObject {
     *
     * token to connect to Qlik Cloud.
     *
     * Generated Token to connect to Databricks.
     *
     * To Connect to Dagster Cloud
     *
     * Generated Token to connect to Databricks.
     *
     * Generated Token to connect to DBTCloud.
     *
     * Token to connect to Stitch api doc
@@ -2972,6 +2972,8 @@ export interface ConfigObject {
    /**
     * Types of methods used to authenticate to the tableau instance
     *
     * Choose between different authentication types for Databricks.
     *
     * Choose Auth Config Type.
     *
     * Types of methods used to authenticate to the alation instance
@@ -4098,6 +4100,16 @@ export enum AuthProvider {
 *
 * Access Token Auth Credentials
 *
 * Choose between different authentication types for Databricks.
 *
 * Personal Access Token authentication for Databricks.
 *
 * OAuth2 Machine-to-Machine authentication using Service Principal credentials for
 * Databricks.
 *
 * Azure Active Directory authentication for Azure Databricks workspaces using Service
 * Principal.
 *
 * Choose Auth Config Type.
 *
 * Common Database Connection Config
@@ -4151,8 +4163,35 @@ export interface AuthenticationTypeForTableau {
     * Personal Access Token Secret.
     */
    personalAccessTokenSecret?: string;
    awsConfig?: AWSCredentials;
    azureConfig?: AzureCredentials;
    /**
     * Generated Personal Access Token for Databricks workspace authentication. This token is
     * created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
     */
    token?: string;
    /**
     * Service Principal Application ID created in your Databricks Account Console for OAuth
     * Machine-to-Machine authentication.
     */
    clientId?: string;
    /**
     * OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
     * secure OAuth2 authentication.
     */
    clientSecret?: string;
    /**
     * Azure Service Principal Application (client) ID registered in your Azure Active Directory.
     */
    azureClientId?: string;
    /**
     * Azure Service Principal client secret created in Azure AD for authentication.
     */
    azureClientSecret?: string;
    /**
     * Azure Active Directory Tenant ID where your Service Principal is registered.
     */
    azureTenantId?: string;
    awsConfig?: AWSCredentials;
    azureConfig?: AzureCredentials;
    /**
     * JWT to connect to source.
     */
@ -106,6 +106,10 @@ export interface Connection {
* Snowflake warehouse.
*/
warehouse?: string;
/**
* Choose between different authentication types for Databricks.
*/
authType?: AuthenticationType;
/**
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
* would like to restrict the metadata reading to a single catalog. When left blank,
@ -132,10 +136,47 @@ export interface Connection {
* Databricks compute resources URL.
*/
httpPath?: string;
}

/**
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*/
export interface AuthenticationType {
/**
* Generated Token to connect to Databricks.
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
}

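Note: the generated AuthenticationType above flattens all three auth modes into a single set of optional fields, so a caller is expected to fill in exactly one credential set. A minimal sketch of what each mode looks like when populated (the DatabricksAuthSketch type and the placeholder values are illustrative only, not part of the generated code):

// Sketch only: mirrors the flattened AuthenticationType generated above.
interface DatabricksAuthSketch {
    token?: string;
    clientId?: string;
    clientSecret?: string;
    azureClientId?: string;
    azureClientSecret?: string;
    azureTenantId?: string;
}

// Personal Access Token mode: only `token` is set.
const patAuth: DatabricksAuthSketch = { token: "<databricks-pat>" };

// OAuth2 Machine-to-Machine mode: the Databricks service principal pair is set.
const oauthAuth: DatabricksAuthSketch = {
    clientId: "<service-principal-application-id>",
    clientSecret: "<oauth-secret>",
};

// Azure AD mode: the three azure* fields are set together.
const azureAdAuth: DatabricksAuthSketch = {
    azureClientId: "<azure-client-id>",
    azureClientSecret: "<azure-client-secret>",
    azureTenantId: "<azure-tenant-id>",
};
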
/**

@ -14,6 +14,10 @@
* Databricks Connection Config
*/
export interface DatabricksConnection {
/**
* Choose between different authentication types for Databricks.
*/
authType: AuthenticationType;
/**
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
* would like to restrict the metadata reading to a single catalog. When left blank,
@ -46,16 +50,53 @@ export interface DatabricksConnection {
* SQLAlchemy driver scheme options.
*/
scheme?: DatabricksScheme;
/**
* Generated Token to connect to Databricks.
*/
token: string;
/**
* Service Type
*/
type?: DatabricksType;
}

/**
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*/
export interface AuthenticationType {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
}

/**
* SQLAlchemy driver scheme options.
*/

@ -319,12 +319,12 @@ export interface ConfigObject {
/**
* Generated Token to connect to OpenAPI Schema.
*
* Generated Token to connect to Databricks.
*
* token to connect to Qlik Cloud.
*
* To Connect to Dagster Cloud
*
* Generated Token to connect to Databricks.
*
* Generated Token to connect to DBTCloud.
*
* Token to connect to Stitch api doc
@ -825,6 +825,18 @@ export interface ConfigObject {
* Establish secure connection with clickhouse
*/
secure?: boolean;
/**
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the tableau instance
*
* Types of methods used to authenticate to the alation instance
*
* Authentication type to connect to Apache Ranger.
*/
authType?: AuthenticationType | NoConfigAuthenticationTypes;
/**
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
* would like to restrict the metadata reading to a single catalog. When left blank,
@ -907,16 +919,6 @@ export interface ConfigObject {
* Authentication mode to connect to Impala.
*/
authMechanism?: AuthMechanismEnum;
/**
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the tableau instance
*
* Types of methods used to authenticate to the alation instance
*
* Authentication type to connect to Apache Ranger.
*/
authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
/**
* Use slow logs to extract lineage.
*/
@ -1953,6 +1955,16 @@ export enum AuthProvider {
}

/**
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -1985,7 +1997,34 @@ export enum AuthProvider {
*
* Configuration for connecting to Ranger Basic Auth.
*/
export interface AuthConfigurationType {
export interface AuthenticationType {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
/**
* Password to connect to source.
*
@ -2923,7 +2962,7 @@ export interface ConfigConnection {
/**
* Choose Auth Config Type.
*/
authType?: ConnectionAuthConfigurationType;
authType?: AuthConfigurationType;
/**
* Custom OpenMetadata Classification name for Postgres policy tags.
*/
@ -2994,7 +3033,7 @@ export interface ConfigConnection {
*
* Azure Database Connection Config
*/
export interface ConnectionAuthConfigurationType {
export interface AuthConfigurationType {
/**
* Password to connect to source.
*/
@ -3413,7 +3452,7 @@ export interface HiveMetastoreConnectionDetails {
/**
* Choose Auth Config Type.
*/
authType?: ConnectionAuthConfigurationType;
authType?: AuthConfigurationType;
/**
* Custom OpenMetadata Classification name for Postgres policy tags.
*/

@ -879,12 +879,12 @@ export interface ConfigObject {
/**
* Generated Token to connect to OpenAPI Schema.
*
* Generated Token to connect to Databricks.
*
* token to connect to Qlik Cloud.
*
* To Connect to Dagster Cloud
*
* Generated Token to connect to Databricks.
*
* Generated Token to connect to DBTCloud.
*
* Token to connect to Stitch api doc
@ -1385,6 +1385,18 @@ export interface ConfigObject {
* Establish secure connection with clickhouse
*/
secure?: boolean;
/**
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the tableau instance
*
* Types of methods used to authenticate to the alation instance
*
* Authentication type to connect to Apache Ranger.
*/
authType?: AuthenticationType | NoConfigAuthenticationTypes;
/**
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
* would like to restrict the metadata reading to a single catalog. When left blank,
@ -1467,16 +1479,6 @@ export interface ConfigObject {
* Authentication mode to connect to Impala.
*/
authMechanism?: AuthMechanismEnum;
/**
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the tableau instance
*
* Types of methods used to authenticate to the alation instance
*
* Authentication type to connect to Apache Ranger.
*/
authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
/**
* Use slow logs to extract lineage.
*/
@ -2442,6 +2444,16 @@ export enum AuthMechanismEnum {
}

/**
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -2474,7 +2486,34 @@ export enum AuthMechanismEnum {
*
* Configuration for connecting to Ranger Basic Auth.
*/
export interface AuthConfigurationType {
export interface AuthenticationType {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
/**
* Password to connect to source.
*
@ -3382,7 +3421,7 @@ export interface ConfigConnection {
/**
* Choose Auth Config Type.
*/
authType?: ConnectionAuthConfigurationType;
authType?: AuthConfigurationType;
/**
* Custom OpenMetadata Classification name for Postgres policy tags.
*/
@ -3453,7 +3492,7 @@ export interface ConfigConnection {
*
* Azure Database Connection Config
*/
export interface ConnectionAuthConfigurationType {
export interface AuthConfigurationType {
/**
* Password to connect to source.
*/
@ -3861,7 +3900,7 @@ export interface HiveMetastoreConnectionDetails {
/**
* Choose Auth Config Type.
*/
authType?: ConnectionAuthConfigurationType;
authType?: AuthConfigurationType;
/**
* Custom OpenMetadata Classification name for Postgres policy tags.
*/

@ -0,0 +1,30 @@
/*
* Copyright 2025 Collate.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*/
export interface AzureAdSetup {
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId: string;
}
@ -0,0 +1,28 @@
/*
* Copyright 2025 Collate.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*/
export interface DatabricksOAuth {
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret: string;
}
@ -0,0 +1,22 @@
/*
* Copyright 2025 Collate.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Personal Access Token authentication for Databricks.
*/
export interface PersonalAccessToken {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token: string;
}
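
The three standalone interfaces above (AzureAdSetup, DatabricksOAuth, PersonalAccessToken) map one-to-one to the auth options added to the connection schema. A hedged sketch of how client code could narrow a union of the three by probing for the required field unique to each mode (the DatabricksAuth alias and the describeAuth helper are hypothetical, not generated code):

// Sketch only: local copies of the generated shapes so the example is self-contained.
interface PersonalAccessToken { token: string; }
interface DatabricksOAuth { clientId: string; clientSecret: string; }
interface AzureAdSetup {
    azureClientId: string;
    azureClientSecret: string;
    azureTenantId: string;
}

type DatabricksAuth = PersonalAccessToken | DatabricksOAuth | AzureAdSetup;

// Narrow the union by checking for the field that is unique to each mode.
function describeAuth(auth: DatabricksAuth): string {
    if ("token" in auth) {
        return "Personal Access Token";
    }
    if ("azureTenantId" in auth) {
        return "Azure AD Service Principal";
    }
    return "Databricks OAuth2 Machine-to-Machine";
}

console.log(describeAuth({ token: "<databricks-pat>" })); // Personal Access Token
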
@ -14,6 +14,10 @@
* Databricks Connection Config
*/
export interface DatabricksConnection {
/**
* Choose between different authentication types for Databricks.
*/
authType: AuthenticationType;
/**
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
* would like to restrict the metadata reading to a single catalog. When left blank,
@ -70,16 +74,53 @@ export interface DatabricksConnection {
* Regex to only include/exclude tables that matches the pattern.
*/
tableFilterPattern?: FilterPattern;
/**
* Generated Token to connect to Databricks.
*/
token: string;
/**
* Service Type
*/
type?: DatabricksType;
}

/**
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*/
export interface AuthenticationType {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
}

/**
* Regex to only include/exclude databases that matches the pattern.
*

@ -14,6 +14,10 @@
* UnityCatalog Connection Config
*/
export interface UnityCatalogConnection {
/**
* Choose between different authentication types for Databricks.
*/
authType: AuthenticationType;
/**
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
* would like to restrict the metadata reading to a single catalog. When left blank,
@ -66,16 +70,53 @@ export interface UnityCatalogConnection {
* Regex to only include/exclude tables that matches the pattern.
*/
tableFilterPattern?: FilterPattern;
/**
* Generated Token to connect to Databricks.
*/
token: string;
/**
* Service Type
*/
type?: DatabricksType;
}

/**
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*/
export interface AuthenticationType {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
}

/**
* Regex to only include/exclude databases that matches the pattern.
*

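Because every field in the generated AuthenticationType stays optional, the type system alone does not prevent a UnityCatalog or Databricks config from mixing modes. A small illustrative check, assuming the field names above, that exactly one complete credential set is present (not part of the generated code):

// Sketch only: mirrors the optional fields of the generated AuthenticationType.
interface FlattenedAuth {
    token?: string;
    clientId?: string;
    clientSecret?: string;
    azureClientId?: string;
    azureClientSecret?: string;
    azureTenantId?: string;
}

// Count how many complete credential sets are present; a well-formed
// Databricks/UnityCatalog connection should have exactly one.
function configuredModes(auth: FlattenedAuth): number {
    const pat = auth.token ? 1 : 0;
    const oauth = auth.clientId && auth.clientSecret ? 1 : 0;
    const azureAd =
        auth.azureClientId && auth.azureClientSecret && auth.azureTenantId ? 1 : 0;
    return pat + oauth + azureAd;
}

const isValid = configuredModes({ token: "<databricks-pat>" }) === 1; // true
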
@ -304,10 +304,10 @@ export interface ConfigObject {
*
* token to connect to Qlik Cloud.
*
* Generated Token to connect to Databricks.
*
* To Connect to Dagster Cloud
*
* Generated Token to connect to Databricks.
*
* Generated Token to connect to DBTCloud.
*
* Token to connect to Stitch api doc
@ -764,6 +764,8 @@ export interface ConfigObject {
/**
* Types of methods used to authenticate to the tableau instance
*
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the alation instance
@ -1942,6 +1944,16 @@ export enum AuthProvider {
*
* Access Token Auth Credentials
*
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -1995,8 +2007,35 @@ export interface AuthenticationTypeForTableau {
* Personal Access Token Secret.
*/
personalAccessTokenSecret?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* JWT to connect to source.
*/

@ -665,6 +665,12 @@ export interface ConfigObject {
* Establish secure connection with clickhouse
*/
secure?: boolean;
/**
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*/
authType?: AuthenticationType | NoConfigAuthenticationTypes;
/**
* Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
* would like to restrict the metadata reading to a single catalog. When left blank,
@ -689,10 +695,6 @@ export interface ConfigObject {
* Table name to fetch the query history.
*/
queryHistoryTable?: string;
/**
* Generated Token to connect to Databricks.
*/
token?: string;
/**
* CLI Driver version to connect to DB2. If not provided, the latest version will be used.
*/
@ -745,10 +747,6 @@ export interface ConfigObject {
* Authentication mode to connect to Impala.
*/
authMechanism?: AuthMechanismEnum;
/**
* Choose Auth Config Type.
*/
authType?: AuthConfigurationType | NoConfigAuthenticationTypes;
/**
* Use slow logs to extract lineage.
*/
@ -1032,6 +1030,16 @@ export enum AuthMechanismEnum {
}

/**
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -1042,7 +1050,34 @@ export enum AuthMechanismEnum {
*
* Configuration for connecting to DataStax Astra DB in the cloud.
*/
export interface AuthConfigurationType {
export interface AuthenticationType {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
/**
* Password to connect to source.
*/
@ -1778,7 +1813,7 @@ export interface HiveMetastoreConnectionDetails {
/**
* Choose Auth Config Type.
*/
authType?: HiveMetastoreConnectionDetailsAuthConfigurationType;
authType?: AuthConfigurationType;
/**
* Custom OpenMetadata Classification name for Postgres policy tags.
*/
@ -1870,7 +1905,7 @@ export interface HiveMetastoreConnectionDetails {
*
* Azure Database Connection Config
*/
export interface HiveMetastoreConnectionDetailsAuthConfigurationType {
export interface AuthConfigurationType {
/**
* Password to connect to source.
*/

@ -3023,10 +3023,10 @@ export interface ConfigObject {
*
* token to connect to Qlik Cloud.
*
* Generated Token to connect to Databricks.
*
* To Connect to Dagster Cloud
*
* Generated Token to connect to Databricks.
*
* Generated Token to connect to DBTCloud.
*
* Token to connect to Stitch api doc
@ -3483,6 +3483,8 @@ export interface ConfigObject {
/**
* Types of methods used to authenticate to the tableau instance
*
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the alation instance
@ -4590,6 +4592,16 @@ export enum AuthMechanismEnum {
*
* Access Token Auth Credentials
*
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -4643,8 +4655,35 @@ export interface AuthenticationTypeForTableau {
* Personal Access Token Secret.
*/
personalAccessTokenSecret?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* JWT to connect to source.
*/

@ -348,10 +348,10 @@ export interface ConfigObject {
*
* token to connect to Qlik Cloud.
*
* Generated Token to connect to Databricks.
*
* To Connect to Dagster Cloud
*
* Generated Token to connect to Databricks.
*
* Generated Token to connect to DBTCloud.
*
* Token to connect to Stitch api doc
@ -808,6 +808,8 @@ export interface ConfigObject {
/**
* Types of methods used to authenticate to the tableau instance
*
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the alation instance
@ -1986,6 +1988,16 @@ export enum AuthProvider {
*
* Access Token Auth Credentials
*
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -2039,8 +2051,35 @@ export interface AuthenticationTypeForTableau {
* Personal Access Token Secret.
*/
personalAccessTokenSecret?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* JWT to connect to source.
*/

@ -388,10 +388,10 @@ export interface ConfigObject {
*
* token to connect to Qlik Cloud.
*
* Generated Token to connect to Databricks.
*
* To Connect to Dagster Cloud
*
* Generated Token to connect to Databricks.
*
* Generated Token to connect to DBTCloud.
*
* Token to connect to Stitch api doc
@ -848,6 +848,8 @@ export interface ConfigObject {
/**
* Types of methods used to authenticate to the tableau instance
*
* Choose between different authentication types for Databricks.
*
* Choose Auth Config Type.
*
* Types of methods used to authenticate to the alation instance
@ -2039,6 +2041,16 @@ export enum AuthProvider {
*
* Access Token Auth Credentials
*
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*
* Choose Auth Config Type.
*
* Common Database Connection Config
@ -2092,8 +2104,35 @@ export interface AuthenticationTypeForTableau {
* Personal Access Token Secret.
*/
personalAccessTokenSecret?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
awsConfig?: AWSCredentials;
azureConfig?: AzureCredentials;
/**
* JWT to connect to source.
*/