fix: exporter databricksConnection file

Keshav Mohta 2025-09-29 01:25:28 +05:30
parent f15ec10444
commit 64daf39163
3 changed files with 105 additions and 11 deletions


@@ -40,11 +40,23 @@
       "description": "Host and port of the Databricks service.",
       "type": "string"
     },
-    "token": {
-      "title": "Token",
-      "description": "Generated Token to connect to Databricks.",
-      "type": "string",
-      "format": "password"
+    "authType": {
+      "title": "Authentication Type",
+      "description": "Choose between different authentication types for Databricks.",
+      "oneOf": [
+        {
+          "title": "Personal Access Token",
+          "$ref": "../../../../services/connections/database/databricks/personalAccessToken.json"
+        },
+        {
+          "title": "Databricks OAuth",
+          "$ref": "../../../../services/connections/database/databricks/databricksOAuth.json"
+        },
+        {
+          "title": "Azure AD Setup",
+          "$ref": "../../../../services/connections/database/databricks/azureAdSetup.json"
+        }
+      ]
     },
     "httpPath": {
       "title": "Http Path",
@@ -79,7 +91,7 @@
   "additionalProperties": false,
   "required": [
     "hostPort",
-    "token",
+    "authType",
     "httpPath"
   ]
 }
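For orientation only (not part of this commit), below is a minimal TypeScript sketch of a connection payload shaped to the updated schema: the flat token property is gone and an authType object takes its place, chosen from one of the three referenced sub-schemas. The sketch assumes the personalAccessToken.json sub-schema exposes the token field shown in the generated types further down; the host, token, and warehouse path values are placeholders.

// Hypothetical payload that would satisfy the updated exporter schema,
// using the Personal Access Token variant of "authType".
const databricksConnection = {
    hostPort: "adb-1234567890123456.7.azuredatabricks.net:443", // placeholder host:port
    authType: {
        // Placeholder PAT; field name assumed from the referenced personalAccessToken.json
        token: "dapiXXXXXXXXXXXXXXXXXXXXXXXX",
    },
    httpPath: "/sql/1.0/warehouses/abc123", // placeholder compute resource URL
};

console.log(JSON.stringify(databricksConnection, null, 2));

Because the required list now names authType instead of token, a payload that only carries the old top-level token would no longer validate against this schema.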


@@ -106,6 +106,10 @@ export interface Connection {
      * Snowflake warehouse.
      */
     warehouse?: string;
+    /**
+     * Choose between different authentication types for Databricks.
+     */
+    authType?: AuthenticationType;
     /**
      * Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
      * would like to restrict the metadata reading to a single catalog. When left blank,
@@ -132,10 +136,47 @@ export interface Connection {
      * Databricks compute resources URL.
      */
     httpPath?: string;
+}
+
+/**
+ * Choose between different authentication types for Databricks.
+ *
+ * Personal Access Token authentication for Databricks.
+ *
+ * OAuth2 Machine-to-Machine authentication using Service Principal credentials for
+ * Databricks.
+ *
+ * Azure Active Directory authentication for Azure Databricks workspaces using Service
+ * Principal.
+ */
+export interface AuthenticationType {
     /**
-     * Generated Token to connect to Databricks.
+     * Generated Personal Access Token for Databricks workspace authentication. This token is
+     * created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
      */
     token?: string;
+    /**
+     * Service Principal Application ID created in your Databricks Account Console for OAuth
+     * Machine-to-Machine authentication.
+     */
+    clientId?: string;
+    /**
+     * OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
+     * secure OAuth2 authentication.
+     */
+    clientSecret?: string;
+    /**
+     * Azure Service Principal Application (client) ID registered in your Azure Active Directory.
+     */
+    azureClientId?: string;
+    /**
+     * Azure Service Principal client secret created in Azure AD for authentication.
+     */
+    azureClientSecret?: string;
+    /**
+     * Azure Active Directory Tenant ID where your Service Principal is registered.
+     */
+    azureTenantId?: string;
 }
 
 /**
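As a rough usage sketch (not from this commit), the new optional authType field on the exporter Connection type can carry OAuth Machine-to-Machine credentials instead of a personal access token. The interfaces below simply restate the generated ones above, and every credential value is a placeholder.

// Restated from the generated code above (only the fields used here).
interface AuthenticationType {
    token?: string;
    clientId?: string;
    clientSecret?: string;
    azureClientId?: string;
    azureClientSecret?: string;
    azureTenantId?: string;
}

interface Connection {
    warehouse?: string;
    httpPath?: string;
    authType?: AuthenticationType; // new optional field introduced by this change
}

// Databricks OAuth (Service Principal) variant: only clientId/clientSecret are set.
const oauthConnection: Connection = {
    httpPath: "/sql/1.0/warehouses/abc123",            // placeholder
    authType: {
        clientId: "service-principal-application-id",   // placeholder
        clientSecret: "service-principal-oauth-secret", // placeholder
    },
};

console.log(Object.keys(oauthConnection.authType ?? {}));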


@@ -14,6 +14,10 @@
  * Databricks Connection Config
  */
 export interface DatabricksConnection {
+    /**
+     * Choose between different authentication types for Databricks.
+     */
+    authType: AuthenticationType;
     /**
      * Catalog of the data source(Example: hive_metastore). This is optional parameter, if you
      * would like to restrict the metadata reading to a single catalog. When left blank,
@@ -46,16 +50,53 @@ export interface DatabricksConnection {
      * SQLAlchemy driver scheme options.
      */
     scheme?: DatabricksScheme;
-    /**
-     * Generated Token to connect to Databricks.
-     */
-    token: string;
     /**
      * Service Type
      */
     type?: DatabricksType;
 }
 
+/**
+ * Choose between different authentication types for Databricks.
+ *
+ * Personal Access Token authentication for Databricks.
+ *
+ * OAuth2 Machine-to-Machine authentication using Service Principal credentials for
+ * Databricks.
+ *
+ * Azure Active Directory authentication for Azure Databricks workspaces using Service
+ * Principal.
+ */
+export interface AuthenticationType {
+    /**
+     * Generated Personal Access Token for Databricks workspace authentication. This token is
+     * created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
+     */
+    token?: string;
+    /**
+     * Service Principal Application ID created in your Databricks Account Console for OAuth
+     * Machine-to-Machine authentication.
+     */
+    clientId?: string;
+    /**
+     * OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
+     * secure OAuth2 authentication.
+     */
+    clientSecret?: string;
+    /**
+     * Azure Service Principal Application (client) ID registered in your Azure Active Directory.
+     */
+    azureClientId?: string;
+    /**
+     * Azure Service Principal client secret created in Azure AD for authentication.
+     */
+    azureClientSecret?: string;
+    /**
+     * Azure Active Directory Tenant ID where your Service Principal is registered.
+     */
+    azureTenantId?: string;
+}
+
 /**
  * SQLAlchemy driver scheme options.
  */
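For completeness, a hedged sketch (not part of the commit) of the Azure AD variant against the DatabricksConnection type, where authType is required rather than optional. The interfaces restate the generated code above; the catalog value follows the example named in the field's own description, and every credential value is a placeholder.

// Restated from the generated code above (only the fields used here).
interface AuthenticationType {
    token?: string;
    clientId?: string;
    clientSecret?: string;
    azureClientId?: string;
    azureClientSecret?: string;
    azureTenantId?: string;
}

interface DatabricksConnection {
    authType: AuthenticationType; // required here, unlike the optional exporter field
    catalog?: string;
}

// Azure AD Service Principal variant: the three azure* fields travel together.
const azureConnection: DatabricksConnection = {
    catalog: "hive_metastore",
    authType: {
        azureClientId: "00000000-0000-0000-0000-000000000000",  // placeholder application (client) ID
        azureClientSecret: "<azure-ad-client-secret>",           // placeholder secret
        azureTenantId: "11111111-1111-1111-1111-111111111111",   // placeholder tenant ID
    },
};

console.log(azureConnection.authType.azureTenantId);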