fix: generated ts files

Keshav Mohta 2025-09-29 00:50:17 +05:30
parent 91e9763fa0
commit f15ec10444
No known key found for this signature in database
GPG Key ID: 9481AB99C36FAE9C
11 changed files with 45886 additions and 0 deletions


@@ -0,0 +1,232 @@
/*
* Copyright 2025 Collate.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* UnityCatalog Connection Config
*/
export interface UnityCatalogConnection {
/**
* Choose between different authentication types for Databricks.
*/
authType: AuthenticationType;
/**
* Catalog of the data source (Example: hive_metastore). This is an optional parameter; set
* it if you would like to restrict metadata reading to a single catalog. When left blank,
* OpenMetadata Ingestion attempts to scan all the catalogs.
*/
catalog?: string;
connectionArguments?: { [key: string]: any };
connectionOptions?: { [key: string]: string };
/**
* The maximum amount of time (in seconds) to wait for a successful connection to the data
* source. If the connection attempt takes longer than this timeout period, an error will be
* returned.
*/
connectionTimeout?: number;
/**
* Regex to only include/exclude databases that match the pattern.
*/
databaseFilterPattern?: FilterPattern;
/**
* Database Schema of the data source. This is an optional parameter; set it if you would
* like to restrict metadata reading to a single schema. When left blank, OpenMetadata
* Ingestion attempts to scan all the schemas.
*/
databaseSchema?: string;
/**
* Host and port of the Databricks service.
*/
hostPort: string;
/**
* Databricks compute resources URL.
*/
httpPath?: string;
sampleDataStorageConfig?: SampleDataStorageConfig;
/**
* Regex to only include/exclude schemas that match the pattern.
*/
schemaFilterPattern?: FilterPattern;
/**
* SQLAlchemy driver scheme options.
*/
scheme?: DatabricksScheme;
supportsDatabase?: boolean;
supportsDBTExtraction?: boolean;
supportsLineageExtraction?: boolean;
supportsMetadataExtraction?: boolean;
supportsProfiler?: boolean;
supportsQueryComment?: boolean;
supportsUsageExtraction?: boolean;
/**
* Regex to only include/exclude tables that match the pattern.
*/
tableFilterPattern?: FilterPattern;
/**
* Service Type
*/
type?: DatabricksType;
}
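// An illustrative sketch (not part of the generated output) of a config object
// satisfying the interface above. All values are hypothetical placeholders, not
// a definitive setup; `scheme` and `type` are omitted here because their enums
// are declared later in this file.
const exampleConnection: UnityCatalogConnection = {
    hostPort: "adb-1234567890123456.7.azuredatabricks.net:443",
    httpPath: "/sql/1.0/warehouses/abc123def456",
    authType: { token: "<personal-access-token>" },
    catalog: "hive_metastore",
    connectionTimeout: 120,
};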
/**
* Choose between different authentication types for Databricks.
*
* Personal Access Token authentication for Databricks.
*
* OAuth2 Machine-to-Machine authentication using Service Principal credentials for
* Databricks.
*
* Azure Active Directory authentication for Azure Databricks workspaces using Service
* Principal.
*/
export interface AuthenticationType {
/**
* Generated Personal Access Token for Databricks workspace authentication. This token is
* created from User Settings -> Developer -> Access Tokens in your Databricks workspace.
*/
token?: string;
/**
* Service Principal Application ID created in your Databricks Account Console for OAuth
* Machine-to-Machine authentication.
*/
clientId?: string;
/**
* OAuth Secret generated for the Service Principal in Databricks Account Console. Used for
* secure OAuth2 authentication.
*/
clientSecret?: string;
/**
* Azure Service Principal Application (client) ID registered in your Azure Active Directory.
*/
azureClientId?: string;
/**
* Azure Service Principal client secret created in Azure AD for authentication.
*/
azureClientSecret?: string;
/**
* Azure Active Directory Tenant ID where your Service Principal is registered.
*/
azureTenantId?: string;
}
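// Sketches of the three authentication variants the comment above describes;
// exactly one group of fields is set per variant. All IDs and secrets are
// hypothetical placeholders.
const patAuth: AuthenticationType = {
    token: "<personal-access-token>",
};
const oauthM2MAuth: AuthenticationType = {
    clientId: "<service-principal-application-id>",
    clientSecret: "<oauth-secret>",
};
const azureAdAuth: AuthenticationType = {
    azureClientId: "<azure-application-client-id>",
    azureClientSecret: "<azure-client-secret>",
    azureTenantId: "<azure-tenant-id>",
};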
/**
* Regex to only include/exclude databases that match the pattern.
*
* Regex to only fetch entities that match the pattern.
*
* Regex to only include/exclude schemas that match the pattern.
*
* Regex to only include/exclude tables that match the pattern.
*/
export interface FilterPattern {
/**
* List of strings/regex patterns; database entities that match any pattern are excluded.
*/
excludes?: string[];
/**
* List of strings/regex patterns; database entities that match any pattern are included.
*/
includes?: string[];
}
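// Example filter, assuming hypothetical entity names: include entities that
// start with "prod_" and exclude any ending in "_tmp". Entries are regex strings.
const exampleFilter: FilterPattern = {
    includes: ["^prod_.*"],
    excludes: [".*_tmp$"],
};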
/**
* Storage config to store sample data
*/
export interface SampleDataStorageConfig {
config?: DataStorageConfig;
}
/**
* Storage config to store sample data
*/
export interface DataStorageConfig {
/**
* Bucket Name
*/
bucketName?: string;
/**
* Provide the pattern of the path where the generated sample data file needs to be stored.
*/
filePathPattern?: string;
/**
* When this field is enabled, a single parquet file will be created to store sample data;
* otherwise a new file will be created per day.
*/
overwriteData?: boolean;
/**
* Prefix of the data source.
*/
prefix?: string;
storageConfig?: AwsCredentials;
[property: string]: any;
}
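// A sketch of a sample-data storage config; the bucket name, prefix, and path
// pattern are hypothetical placeholders.
const exampleStorage: SampleDataStorageConfig = {
    config: {
        bucketName: "om-sample-data",
        prefix: "unity_catalog",
        filePathPattern: "sample_data/{table}.parquet",
        overwriteData: true,
    },
};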
/**
* AWS credentials configs.
*/
export interface AwsCredentials {
/**
* The Amazon Resource Name (ARN) of the role to assume. Required field when assuming a
* role.
*/
assumeRoleArn?: string;
/**
* An identifier for the assumed role session. Use the role session name to uniquely
* identify a session when the same role is assumed by different principals or for different
* reasons. Required field when assuming a role.
*/
assumeRoleSessionName?: string;
/**
* The source identity specified when assuming the role. Optional field when assuming a
* role.
*/
assumeRoleSourceIdentity?: string;
/**
* AWS Access key ID.
*/
awsAccessKeyId?: string;
/**
* AWS Region.
*/
awsRegion?: string;
/**
* AWS Secret Access Key.
*/
awsSecretAccessKey?: string;
/**
* AWS Session Token.
*/
awsSessionToken?: string;
/**
* Endpoint URL for AWS.
*/
endPointURL?: string;
/**
* The name of a profile to use with the boto session.
*/
profileName?: string;
}
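// A sketch of AWS credentials using an assumed role; the ARN and session name
// are hypothetical placeholders. Per the field docs above, assumeRoleArn and
// assumeRoleSessionName are required together when assuming a role.
const exampleAwsCredentials: AwsCredentials = {
    awsRegion: "us-east-1",
    assumeRoleArn: "arn:aws:iam::123456789012:role/om-sample-data-writer",
    assumeRoleSessionName: "openmetadata-ingestion",
};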
/**
* SQLAlchemy driver scheme options.
*/
export enum DatabricksScheme {
DatabricksConnector = "databricks+connector",
}
/**
* Service Type
*
* Service type.
*/
export enum DatabricksType {
UnityCatalog = "UnityCatalog",
}
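// With both enums now declared, a complete connection can pin the SQLAlchemy
// driver scheme and service type explicitly. Values remain hypothetical
// placeholders; this sketch uses the OAuth2 M2M variant.
const fullExampleConnection: UnityCatalogConnection = {
    hostPort: "adb-1234567890123456.7.azuredatabricks.net:443",
    authType: {
        clientId: "<service-principal-application-id>",
        clientSecret: "<oauth-secret>",
    },
    scheme: DatabricksScheme.DatabricksConnector,
    type: DatabricksType.UnityCatalog,
};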

File diff suppressed because it is too large