Snowplow Connector (#23133)
* Snowplow Connector
* markdown file change
* UI
* ts file

Co-authored-by: Akash Verma <akashverma@Mac.lan>
parent 0e5093d366, commit 421f345554
@@ -0,0 +1,14 @@
{
  "name": "Snowplow",
  "displayName": "Snowplow Test Connection",
  "description": "This Test Connection validates the access against the server and basic metadata extraction of pipelines.",
  "steps": [
    {
      "name": "CheckAccess",
      "description": "Check if the instance is reachable to fetch the pipeline details.",
      "errorMessage": "Failed to connect to Snowplow, please validate the credentials",
      "shortCircuit": true,
      "mandatory": true
    }
  ]
}
@@ -0,0 +1,103 @@
{
  "$id": "https://open-metadata.org/schema/entity/services/connections/pipeline/snowplowConnection.json",
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "SnowplowConnection",
  "description": "Snowplow Pipeline Connection Config",
  "type": "object",
  "javaType": "org.openmetadata.schema.services.connections.pipeline.SnowplowConnection",
  "definitions": {
    "SnowplowType": {
      "description": "Service type.",
      "type": "string",
      "enum": ["Snowplow"],
      "default": "Snowplow"
    },
    "SnowplowDeployment": {
      "description": "Snowplow deployment type",
      "type": "string",
      "enum": ["BDP", "Community"],
      "default": "BDP"
    }
  },
  "properties": {
    "type": {
      "title": "Service Type",
      "description": "Service Type",
      "$ref": "#/definitions/SnowplowType",
      "default": "Snowplow"
    },
    "deployment": {
      "title": "Deployment Type",
      "description": "Snowplow deployment type (BDP for managed or Community for self-hosted)",
      "$ref": "#/definitions/SnowplowDeployment",
      "default": "BDP"
    },
    "consoleUrl": {
      "expose": true,
      "title": "Console URL",
      "description": "Snowplow Console URL for BDP deployment",
      "type": "string",
      "format": "uri"
    },
    "apiKey": {
      "title": "API Key",
      "description": "API Key for Snowplow Console API",
      "type": "string",
      "format": "password"
    },
    "organizationId": {
      "title": "Organization ID",
      "description": "Snowplow BDP Organization ID",
      "type": "string"
    },
    "configPath": {
      "title": "Configuration Path",
      "description": "Path to pipeline configuration files for Community deployment",
      "type": "string"
    },
    "cloudProvider": {
      "title": "Cloud Provider",
      "description": "Cloud provider where Snowplow is deployed",
      "type": "string",
      "enum": ["AWS", "GCP", "Azure"],
      "default": "AWS"
    },
    "pipelineFilterPattern": {
      "description": "Regex exclude pipelines.",
      "$ref": "../../../../type/filterPattern.json#/definitions/filterPattern",
      "title": "Pipeline Filter Pattern"
    },
    "supportsMetadataExtraction": {
      "title": "Supports Metadata Extraction",
      "$ref": "../connectionBasicType.json#/definitions/supportsMetadataExtraction"
    }
  },
  "additionalProperties": false,
  "required": ["type", "deployment"],
  "allOf": [
    {
      "if": {
        "properties": {
          "deployment": {
            "const": "BDP"
          }
        }
      },
      "then": {
        "required": ["consoleUrl", "apiKey", "organizationId"]
      }
    },
    {
      "if": {
        "properties": {
          "deployment": {
            "const": "Community"
          }
        }
      },
      "then": {
        "required": ["configPath"]
      }
    }
  ]
}
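
The `allOf`/`if`/`then` blocks in the schema above make different fields mandatory depending on the deployment type: BDP deployments must supply Console credentials, while Community deployments only need a local configuration path. Here is a minimal sketch of that conditional validation using the Python `jsonschema` package; the schema is trimmed (external `$ref`s removed) so the snippet stays self-contained:

```python
import jsonschema

# Trimmed copy of snowplowConnection.json: the external $refs are dropped so
# the snippet is self-contained; only the conditional requirements remain.
schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "type": "object",
    "properties": {
        "type": {"type": "string", "enum": ["Snowplow"]},
        "deployment": {"type": "string", "enum": ["BDP", "Community"]},
        "consoleUrl": {"type": "string"},
        "apiKey": {"type": "string"},
        "organizationId": {"type": "string"},
        "configPath": {"type": "string"},
    },
    "required": ["type", "deployment"],
    "allOf": [
        {
            "if": {"properties": {"deployment": {"const": "BDP"}}},
            "then": {"required": ["consoleUrl", "apiKey", "organizationId"]},
        },
        {
            "if": {"properties": {"deployment": {"const": "Community"}}},
            "then": {"required": ["configPath"]},
        },
    ],
}

# A BDP config without its Console credentials is rejected...
try:
    jsonschema.validate({"type": "Snowplow", "deployment": "BDP"}, schema)
except jsonschema.ValidationError as err:
    print("rejected:", err.message)

# ...while a Community config only needs the local config path.
jsonschema.validate(
    {"type": "Snowplow", "deployment": "Community", "configPath": "/etc/snowplow"},
    schema,
)
print("accepted")
```
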
@@ -36,7 +36,8 @@
     "Stitch",
     "DataFactory",
     "Wherescape",
-    "SSIS"
+    "SSIS",
+    "Snowplow"
   ],
   "javaEnums": [
     {
@@ -98,6 +99,9 @@
     },
     {
       "name": "SSIS"
+    },
+    {
+      "name": "Snowplow"
     }
   ]
 },
@@ -171,6 +175,9 @@
     },
     {
       "$ref": "./connections/pipeline/stitchConnection.json"
+    },
+    {
+      "$ref": "./connections/pipeline/snowplowConnection.json"
     }
   ]
 }
@@ -1,90 +0,0 @@
# ThoughtSpot

In this section, we provide guides and references to use the ThoughtSpot connector.

## Requirements

To access the ThoughtSpot APIs and import liveboards, charts, and data models from ThoughtSpot into OpenMetadata, you need appropriate permissions on your ThoughtSpot instance.

## ThoughtSpot Account Setup and Permissions

### Step 1: Authentication Setup

ThoughtSpot supports multiple authentication methods:

**Basic Authentication:**
- Username and password authentication
- The user should have appropriate permissions to read metadata from ThoughtSpot

**API Access Token Authentication:**
- Use ThoughtSpot API access tokens for authentication
- Generate API access tokens from your ThoughtSpot instance

### Step 2: API Permissions

Ensure your ThoughtSpot user or service account has the following permissions:
- Read access to liveboards and answers
- Read access to worksheets and data models
- Access to metadata APIs
- Export permissions for TML (ThoughtSpot Modeling Language) data

### Step 3: Multi-tenant Configuration (Optional)

If you're using ThoughtSpot Cloud with multiple organizations:
- Set the `Organization ID` parameter to specify which organization to connect to
- This is only applicable for ThoughtSpot Cloud instances

You can find further information on the ThoughtSpot connector in the <a href="https://docs.open-metadata.org/connectors/dashboard/thoughtspot" target="_blank">docs</a>.

## Connection Details

$$section
### Host and Port $(id="hostPort")

The URL of your ThoughtSpot instance. This should be the base URL of your ThoughtSpot deployment.

**Examples:**
- For ThoughtSpot Cloud: `https://my-company.thoughtspot.cloud`
- For on-premise: `https://thoughtspot.company.com`
- For local development: `https://localhost`

If you are running the OpenMetadata ingestion in Docker and your ThoughtSpot instance is hosted on `localhost`, use `host.docker.internal` as the hostname.
$$

$$section
### Authentication $(id="authentication")

Choose the authentication method for connecting to ThoughtSpot:

**Basic Authentication:**
- **Username**: Your ThoughtSpot username
- **Password**: Your ThoughtSpot password

**API Access Token Authentication:**
- **Access Token**: Your ThoughtSpot API access token
$$
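
As a rough illustration of the two methods, here is a minimal Python sketch using `requests`. The token endpoint path (`/api/rest/2.0/auth/token/full`) reflects ThoughtSpot's v2 REST API but should be treated as an assumption and verified against your instance's API documentation:

```python
import requests

BASE_URL = "https://my-company.thoughtspot.cloud"  # your Host and Port value

# Basic Authentication: exchange username/password for a bearer token.
# NOTE: the token endpoint path below follows ThoughtSpot's v2 REST API and
# is an assumption here -- verify it against your instance's API docs.
resp = requests.post(
    f"{BASE_URL}/api/rest/2.0/auth/token/full",
    json={"username": "my_user", "password": "my_password"},
    timeout=30,
)
resp.raise_for_status()
token = resp.json()["token"]

# API Access Token Authentication: subsequent metadata calls simply send the
# token as a bearer header instead of a username/password pair.
headers = {"Authorization": f"Bearer {token}"}
```
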
$$section
### API Version $(id="apiVersion")

The ThoughtSpot API version to use for metadata extraction.

**Options:**
- **v1**: Legacy API version (callosum endpoints)
- **v2**: Current API version (recommended)

**Default:** v2
$$

$$section
### Organization ID $(id="orgId")

Organization ID for multi-tenant ThoughtSpot instances. This parameter is only applicable for ThoughtSpot Cloud deployments.

**Usage:**
- Leave empty for single-tenant instances
- Set to your organization ID for multi-tenant ThoughtSpot Cloud
- This helps identify which organization's data to extract

**Default:** null (single-tenant)
$$
@@ -1,27 +0,0 @@
# Epic

In this section, we provide guides and references to use the Epic FHIR connector.

You can find further information on the Epic connector in the <a href="https://docs.open-metadata.org/connectors/database/epic" target="_blank">docs</a>.

## Connection Details

$$section
### FHIR Server URL $(id="fhirServerUrl")
Base URL of the Epic FHIR server, e.g. `https://fhir.epic.com/interconnect-fhir-oauth/api/FHIR/R4/`.
$$
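
A quick way to sanity-check this URL is to request the server's `CapabilityStatement`, which every FHIR server exposes at `[base]/metadata`. A minimal Python sketch (the public Epic sandbox URL above is reused here for illustration):

```python
import requests

# Substitute your own Epic FHIR server base URL here.
FHIR_SERVER_URL = "https://fhir.epic.com/interconnect-fhir-oauth/api/FHIR/R4/"

# Every FHIR server serves its CapabilityStatement at [base]/metadata.
resp = requests.get(
    FHIR_SERVER_URL.rstrip("/") + "/metadata",
    headers={"Accept": "application/fhir+json"},
    timeout=30,
)
resp.raise_for_status()
capability = resp.json()

# The fhirVersion reported here should match the FHIR Version you configure.
print(capability["resourceType"], capability.get("fhirVersion"))
```
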
$$section
### FHIR Version $(id="fhirVersion")
FHIR specification version supported by the server. Supported values are `R4`, `STU3`, and `DSTU2`.
$$

$$section
### Database Name $(id="databaseName")
Optional name to give to the database in OpenMetadata. If left blank, `epic` will be used as the default value.
$$

$$section
### Supports Metadata Extraction $(id="supportsMetadataExtraction")
Indicates whether OpenMetadata should attempt to extract metadata from the Epic FHIR server in addition to creating the service connection.
$$
@@ -1,34 +0,0 @@
# SSAS

In this section, we provide guides and references to use the SSAS connector.

## Requirements
To use the SSAS connector, ensure that your SSAS instance is accessible via HTTP (XMLA endpoint) and that HTTP access is enabled.

$$note
- The HTTP endpoint must support **Basic Authentication** using a username and password.
- The user account provided must have sufficient privileges to extract metadata from SSAS (such as access to read database, table, and schema information).
- The SSAS models you want to extract metadata from must be deployed and accessible over HTTP (XMLA endpoint).
- The endpoint should be reachable from the OpenMetadata server.

If you are unsure about your permissions, contact your SSAS administrator to confirm that your account can access and read metadata via the XMLA endpoint.
$$

## Connection Details

$$section
### HTTP Connection URL $(id="httpConnection")
URL to your SSAS XMLA endpoint. OpenMetadata will connect to this endpoint to extract metadata.
An example endpoint is `http://<your-server>/olap/msmdpump.dll`.
$$
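
To verify reachability and Basic Authentication before running ingestion, you can probe the endpoint with a short Python sketch. Note that the XMLA endpoint expects SOAP `POST` requests, so this check only confirms that the URL resolves and the credentials are accepted; the server name and credentials below are placeholders:

```python
import requests
from requests.auth import HTTPBasicAuth

# Placeholder endpoint and credentials; substitute your own values.
XMLA_URL = "http://your-server/olap/msmdpump.dll"

# msmdpump.dll only speaks XMLA over SOAP POST, so a plain GET will not
# return metadata -- but a 401 here means the credentials were rejected,
# while a connection error means the endpoint is unreachable.
resp = requests.get(XMLA_URL, auth=HTTPBasicAuth("my_user", "my_password"), timeout=30)
print(resp.status_code)
```
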
$$section
### Username $(id="username")
Username to connect to SSAS. This user should have privileges to read all the metadata from the SSAS instance.
$$

$$section
### Password $(id="password")
Password to connect to SSAS.
$$
@@ -1,252 +0,0 @@
# SSIS

In this section, we provide guides and references to use the SSIS connector.

## Requirements

1. **SSIS Connector**: configured in this section; it requires access to the underlying database.

## Connection Details

$$section
### Metadata Database Connection $(id="connection")

SSIS uses MSSQL as its database.
$$

## MSSQL Connection

In this section, we provide guides and references to use the MSSQL connection.

## Requirements

The user must have `SELECT` privileges to fetch the metadata of tables and views.

```sql
-- Create a new user
-- More details https://learn.microsoft.com/en-us/sql/t-sql/statements/create-user-transact-sql?view=sql-server-ver16
CREATE USER Mary WITH PASSWORD = '********';
-- Grant SELECT permission
GRANT SELECT TO Mary;
```

### Remote Connection

#### 1. SQL Server running

Make sure the SQL Server instance you are trying to connect to is running.

#### 2. Allow remote connections in SSMS (SQL Server Management Studio)

This step allows SQL Server to accept remote connection requests.



#### 3. Configure Windows Firewall

If you are running SQL Server on Windows, you must configure the firewall on the computer running SQL Server to allow access.

1. On the Start menu, select `Run`, type `WF.msc`, and then select `OK`.
2. In `Windows Firewall with Advanced Security`, in the left pane, right-click `Inbound Rules`, and then select `New Rule` in the action pane.
3. In the `Rule Type` dialog box, select `Port`, and then select `Next`.
4. In the `Protocol and Ports` dialog box, select `TCP`. Select `Specific local ports`, and then type the port number of the instance of the Database Engine, such as 1433 for the default instance. Select `Next`.
5. In the `Action` dialog box, select `Allow the connection`, and then select `Next`.
6. In the `Profile` dialog box, select any profiles that describe the computer connection environment when you want to connect to the Database Engine, and then select `Next`.
7. In the `Name` dialog box, type a name and description for this rule, and then select `Finish`.

For detailed steps, please refer to this <a href="https://docs.microsoft.com/en-us/sql/database-engine/configure-windows/configure-a-windows-firewall-for-database-engine-access?view=sql-server-ver15" target="_blank">link</a>.

You can find further information on the MSSQL connector in the <a href="https://docs.open-metadata.org/connectors/database/mssql" target="_blank">docs</a>.

## Connection Details

$$section
### Scheme $(id="scheme")
There are three schemes based on the user's requirement to fetch data from MSSQL:
- **mssql+pytds**: High-performance open-source library for connecting to Microsoft SQL Server.
- **mssql+pyodbc**: Cross-platform Python library that uses ODBC to connect to Microsoft SQL Server.
- **mssql+pymssql**: Python library that uses FreeTDS to connect to Microsoft SQL Server, with support for bulk data transfer and query timeouts.

If you are connecting via Windows authentication from a Linux Docker deployment, please use `mssql+pymssql`. A connection-URL sketch for each scheme follows this section.
$$
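
As a rough sketch of how these schemes translate into SQLAlchemy-style connection URLs (host, credentials, and database below are placeholders, and each scheme needs its driver package installed — `pymssql`, `sqlalchemy-pytds`, or `pyodbc`):

```python
from sqlalchemy import create_engine

# Placeholder credentials and host; substitute your own values.
USER, PASSWORD, HOST_PORT, DATABASE = "my_user", "my_password", "localhost:1433", "master"

# The scheme is simply the prefix of the SQLAlchemy connection URL:
pymssql_engine = create_engine(f"mssql+pymssql://{USER}:{PASSWORD}@{HOST_PORT}/{DATABASE}")
pytds_engine = create_engine(f"mssql+pytds://{USER}:{PASSWORD}@{HOST_PORT}/{DATABASE}")

# pyodbc additionally needs the ODBC driver name (see the Driver section below).
pyodbc_engine = create_engine(
    f"mssql+pyodbc://{USER}:{PASSWORD}@{HOST_PORT}/{DATABASE}"
    "?driver=ODBC+Driver+18+for+SQL+Server"
)

with pymssql_engine.connect() as conn:
    print(conn.exec_driver_sql("SELECT @@VERSION").scalar())
```
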
$$section
### Username $(id="username")

Username to connect to MSSQL. This user should have privileges to read all the metadata in MSSQL.
$$

$$section
### Password $(id="password")

Password to connect to MSSQL.
$$

$$section
### Host Port $(id="hostPort")

This parameter specifies the host and port of the MSSQL instance. This should be specified as a string in the format `hostname:port`. For example, you might set the hostPort parameter to `localhost:1433`.

If you are running the OpenMetadata ingestion in Docker and your services are hosted on `localhost`, use `host.docker.internal:1433` as the value.
$$

$$section
### Database $(id="database")

Initial MSSQL database to connect to.
$$

$$section
### Driver $(id="driver")

Connecting to MSSQL via the **pyodbc** scheme requires the ODBC driver to be installed. Specify the ODBC driver name in this field.

You can download the ODBC driver from <a href="https://learn.microsoft.com/en-us/sql/connect/odbc/download-odbc-driver-for-sql-server?view=sql-server-ver16" target="_blank">here</a>.

In Docker or Kubernetes deployments, the `ODBC Driver 18 for SQL Server` comes installed out of the box.
$$

$$section
### Project Location $(id="packageConnection")

You have two options to provide your SSIS projects for ingestion:

1. **Local Path**: <span style="font-size: 1.1em; color: #b22222; font-style: italic; font-weight: bold;">If you are using the Local Path option, you must run the ingestion workflow through the CLI instead of the UI.</span>
2. **S3 Bucket**: Upload your SSIS projects to an S3 bucket and provide the bucket name along with the necessary S3 credentials.

You can choose either of these methods based on your setup and requirements.
$$
## S3 Connection

In this section, we provide guides and references to use the S3 connector.

## Requirements

We need the following permissions in AWS:

### S3 Permissions

For all the buckets that we want to ingest, we need to provide the following:
- `s3:ListBucket`
- `s3:GetObject`

Note that the `Resources` should be all the buckets that you'd like to scan. A possible policy could be:

```json
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "s3:GetObject",
                "s3:ListBucket"
            ],
            "Resource": [
                "arn:aws:s3:::*"
            ]
        }
    ]
}
```
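
A minimal sketch of what those two permissions allow, using `boto3` (the bucket name and prefix are placeholders, and credentials are resolved from your environment as described in the sections below):

```python
import boto3

s3 = boto3.client("s3")  # region and credentials come from your environment
BUCKET = "my-ssis-projects"  # placeholder bucket name

# s3:ListBucket lets the connector enumerate the project folders...
listing = s3.list_objects_v2(Bucket=BUCKET, Prefix="projects/")
keys = [obj["Key"] for obj in listing.get("Contents", [])]

# ...and s3:GetObject lets it download each project file it found.
for key in keys:
    body = s3.get_object(Bucket=BUCKET, Key=key)["Body"].read()
    print(key, len(body), "bytes")
```
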
## Connection Details

$$section
### AWS Access Key ID $(id="awsAccessKeyId")

When you interact with AWS, you specify your AWS security credentials to verify who you are and whether you have permission to access the resources that you are requesting. AWS uses the security credentials to authenticate and authorize your requests (<a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/security-creds.html" target="_blank">docs</a>).

Access keys consist of two parts:
1. An access key ID (for example, `AKIAIOSFODNN7EXAMPLE`),
2. A secret access key (for example, `wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY`).

You must use both the access key ID and secret access key together to authenticate your requests.

You can find further information on how to manage your access keys <a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_access-keys.html" target="_blank">here</a>.
$$

$$section
### AWS Secret Access Key $(id="awsSecretAccessKey")

Secret access key (for example, `wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY`).
$$

$$section
### AWS Region $(id="awsRegion")

Each AWS Region is a separate geographic area in which AWS clusters data centers (<a href="https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.RegionsAndAvailabilityZones.html" target="_blank">docs</a>).

As AWS can have instances in multiple regions, we need to know the region that the service you want to reach belongs to.

Note that the AWS Region is the only required parameter when configuring a connection. When connecting to the services programmatically, there are different ways in which we can extract and use the rest of the AWS configurations. You can find further information about configuring your credentials <a href="https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#configuring-credentials" target="_blank">here</a>.
$$

$$section
### AWS Session Token $(id="awsSessionToken")

If you are using temporary credentials to access your services, you will need to provide the AWS Access Key ID and AWS Secret Access Key. These temporary credentials also include an AWS Session Token.

You can find more information on <a href="https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_use-resources.html" target="_blank">Using temporary credentials with AWS resources</a>.
$$

$$section
### Endpoint URL $(id="endPointURL")

To connect programmatically to an AWS service, you use an endpoint. An *endpoint* is the URL of the entry point for an AWS web service. The AWS SDKs and the AWS Command Line Interface (AWS CLI) automatically use the default endpoint for each service in an AWS Region, but you can specify an alternate endpoint for your API requests.

Find more information on <a href="https://docs.aws.amazon.com/general/latest/gr/rande.html" target="_blank">AWS service endpoints</a>.
$$

$$section
### Profile Name $(id="profileName")

A named profile is a collection of settings and credentials that you can apply to an AWS CLI command. When you specify a profile to run a command, the settings and credentials are used to run that command. Multiple named profiles can be stored in the config and credentials files.

You can set this field if you'd like to use a profile other than `default`.

Find more information about <a href="https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html" target="_blank">Named profiles for the AWS CLI</a>.
$$

$$section
### Assume Role ARN $(id="assumeRoleArn")

Typically, you use `AssumeRole` within your account or for cross-account access. In this field you'll set the `ARN` (Amazon Resource Name) of the role in the other account.

A user who wants to access a role in a different account must also have permissions that are delegated from the account administrator. The administrator must attach a policy that allows the user to call `AssumeRole` for the `ARN` of the role in the other account.

This is a required field if you'd like to `AssumeRole`.

Find more information on <a href="https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html" target="_blank">AssumeRole</a>.
$$
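
A minimal sketch of what happens under the hood when these `AssumeRole` fields are set, using `boto3` (the role ARN and account ID are placeholders):

```python
import boto3

sts = boto3.client("sts")

# Placeholder ARN; the session name defaults to OpenMetadataSession (see below).
assumed = sts.assume_role(
    RoleArn="arn:aws:iam::123456789012:role/OpenMetadataIngestion",
    RoleSessionName="OpenMetadataSession",
)

# The response carries temporary credentials, including the session token
# described in the AWS Session Token section above.
creds = assumed["Credentials"]
s3 = boto3.client(
    "s3",
    aws_access_key_id=creds["AccessKeyId"],
    aws_secret_access_key=creds["SecretAccessKey"],
    aws_session_token=creds["SessionToken"],
)
print(s3.list_buckets()["Buckets"])
```
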
$$section
### Assume Role Session Name $(id="assumeRoleSessionName")

An identifier for the assumed role session. Use the role session name to uniquely identify a session when the same role is assumed by different principals or for different reasons.

By default, we'll use the name `OpenMetadataSession`.

Find more information about the <a href="https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html#:~:text=An%20identifier%20for%20the%20assumed%20role%20session." target="_blank">Role Session Name</a>.
$$

$$section
### Assume Role Source Identity $(id="assumeRoleSourceIdentity")

The source identity specified by the principal that is calling the `AssumeRole` operation. You can use source identity information in AWS CloudTrail logs to determine who took actions with a role.

Find more information about <a href="https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html#:~:text=Required%3A%20No-,SourceIdentity,-The%20source%20identity" target="_blank">Source Identity</a>.
$$

$$section
### Bucket Name $(id="bucketNames")

Provide the name of the bucket that contains your project folders.
$$
@@ -1,132 +0,0 @@
# Wherescape

In this section, we provide guides and references to use the Wherescape connector.

## Requirements

1. **Wherescape Connector**: configured in this section; it requires access to the underlying database.

## Connection Details

$$section
### Metadata Database Connection $(id="connection")

Select your underlying database connection. We support the MSSQL database from Wherescape.
$$

## MSSQL Connection

In this section, we provide guides and references to use the MSSQL connection.

## Requirements

The user must have `SELECT` privileges to fetch the metadata of tables and views.

```sql
-- Create a new user
-- More details https://learn.microsoft.com/en-us/sql/t-sql/statements/create-user-transact-sql?view=sql-server-ver16
CREATE USER Mary WITH PASSWORD = '********';
-- Grant SELECT permission
GRANT SELECT TO Mary;
```

### Remote Connection

#### 1. SQL Server running

Make sure the SQL Server instance you are trying to connect to is running.

#### 2. Allow remote connections in SSMS (SQL Server Management Studio)

This step allows SQL Server to accept remote connection requests.



#### 3. Configure Windows Firewall

If you are running SQL Server on Windows, you must configure the firewall on the computer running SQL Server to allow access.

1. On the Start menu, select `Run`, type `WF.msc`, and then select `OK`.
2. In `Windows Firewall with Advanced Security`, in the left pane, right-click `Inbound Rules`, and then select `New Rule` in the action pane.
3. In the `Rule Type` dialog box, select `Port`, and then select `Next`.
4. In the `Protocol and Ports` dialog box, select `TCP`. Select `Specific local ports`, and then type the port number of the instance of the Database Engine, such as 1433 for the default instance. Select `Next`.
5. In the `Action` dialog box, select `Allow the connection`, and then select `Next`.
6. In the `Profile` dialog box, select any profiles that describe the computer connection environment when you want to connect to the Database Engine, and then select `Next`.
7. In the `Name` dialog box, type a name and description for this rule, and then select `Finish`.

For detailed steps, please refer to this <a href="https://docs.microsoft.com/en-us/sql/database-engine/configure-windows/configure-a-windows-firewall-for-database-engine-access?view=sql-server-ver15" target="_blank">link</a>.

You can find further information on the MSSQL connector in the <a href="https://docs.open-metadata.org/connectors/database/mssql" target="_blank">docs</a>.

## Connection Details

$$section
### Scheme $(id="scheme")
There are three schemes based on the user's requirement to fetch data from MSSQL:
- **mssql+pytds**: High-performance open-source library for connecting to Microsoft SQL Server.
- **mssql+pyodbc**: Cross-platform Python library that uses ODBC to connect to Microsoft SQL Server.
- **mssql+pymssql**: Python library that uses FreeTDS to connect to Microsoft SQL Server, with support for bulk data transfer and query timeouts.

If you are connecting via Windows authentication from a Linux Docker deployment, please use `mssql+pymssql`.
$$

$$section
### Username $(id="username")

Username to connect to MSSQL. This user should have privileges to read all the metadata in MSSQL.
$$

$$section
### Password $(id="password")

Password to connect to MSSQL.
$$

$$section
### Host Port $(id="hostPort")

This parameter specifies the host and port of the MSSQL instance. This should be specified as a string in the format `hostname:port`. For example, you might set the hostPort parameter to `localhost:1433`.

If you are running the OpenMetadata ingestion in Docker and your services are hosted on `localhost`, use `host.docker.internal:1433` as the value.
$$

$$section
### Database $(id="database")

Provide the name of the database which contains the Wherescape tables.
$$

$$section
### Driver $(id="driver")

Connecting to MSSQL via the **pyodbc** scheme requires the ODBC driver to be installed. Specify the ODBC driver name in this field.

You can download the ODBC driver from <a href="https://learn.microsoft.com/en-us/sql/connect/odbc/download-odbc-driver-for-sql-server?view=sql-server-ver16" target="_blank">here</a>.

In Docker or Kubernetes deployments, the `ODBC Driver 18 for SQL Server` comes installed out of the box.
$$

$$section
### Ingest All Databases $(id="ingestAllDatabases")
Not applicable for Wherescape.
$$

$$section
### Connection Options $(id="connectionOptions")

Additional connection options to build the URL that can be sent to the service during the connection.
$$

$$section
### Connection Arguments $(id="connectionArguments")

Enter the details for any additional connection arguments, such as security or protocol configs, that can be sent to MSSQL during the connection. These details must be added as key-value pairs.

When connecting to MSSQL via the **pyodbc** scheme, the connection arguments `Encrypt: no` and `TrustServerCertificate: yes` are required; a sketch follows this section.
$$
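
As a rough sketch of what those pyodbc connection arguments translate to at the driver level (server, database, and credentials below are placeholders):

```python
import pyodbc

# Placeholder values; substitute your own server, database, and credentials.
conn = pyodbc.connect(
    "DRIVER={ODBC Driver 18 for SQL Server};"
    "SERVER=localhost,1433;"
    "DATABASE=wherescape_meta;"  # hypothetical database name
    "UID=my_user;PWD=my_password;"
    # The two connection arguments called out above:
    "Encrypt=no;TrustServerCertificate=yes;"
)
cursor = conn.cursor()
print(cursor.execute("SELECT @@VERSION").fetchone()[0])
```
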
@@ -366,6 +366,8 @@ export interface RequestConnection {
     *
     * Stitch Connection
     *
     * Snowplow Pipeline Connection Config
     *
     * MlFlow Connection Config
     *
     * Sklearn Connection Config
@@ -1052,6 +1054,8 @@ export interface ConfigObject {
    verify?: string;
    /**
     * Salesforce Organization ID is the unique identifier for your Salesforce identity
     *
     * Snowplow BDP Organization ID
     */
    organizationId?: string;
    /**
@@ -1256,6 +1260,8 @@ export interface ConfigObject {
     * Admin role for full metadata extraction.
     *
     * Fivetran API Secret.
     *
     * API Key for Snowplow Console API
     */
    apiKey?: string;
    /**
@@ -1660,6 +1666,22 @@ export interface ConfigObject {
     * The azure subscription identifier.
     */
    subscription_id?: string;
    /**
     * Cloud provider where Snowplow is deployed
     */
    cloudProvider?: CloudProvider;
    /**
     * Path to pipeline configuration files for Community deployment
     */
    configPath?: string;
    /**
     * Snowplow Console URL for BDP deployment
     */
    consoleUrl?: string;
    /**
     * Snowplow deployment type (BDP for managed or Community for self-hosted)
     */
    deployment?: SnowplowDeployment;
    /**
     * Regex to only fetch MlModels with names matching the pattern.
     */
@@ -2521,6 +2543,15 @@ export interface ConsumerConfigSSLClass {
    sslKey?: string;
}

/**
 * Cloud provider where Snowplow is deployed
 */
export enum CloudProvider {
    Aws = "AWS",
    Azure = "Azure",
    Gcp = "GCP",
}

/**
 * Available sources to fetch the metadata.
 *
@@ -3314,6 +3345,16 @@ export enum MssqlType {
    Mssql = "Mssql",
}

/**
 * Snowplow deployment type (BDP for managed or Community for self-hosted)
 *
 * Snowplow deployment type
 */
export enum SnowplowDeployment {
    Bdp = "BDP",
    Community = "Community",
}

/**
 * Configuration for Sink Component in the OpenMetadata Ingestion Framework.
 */
@@ -4091,6 +4132,7 @@ export enum ConfigType {
    SingleStore = "SingleStore",
    Sklearn = "Sklearn",
    Snowflake = "Snowflake",
    Snowplow = "Snowplow",
    Spark = "Spark",
    Spline = "Spline",
    Ssas = "SSAS",
@@ -106,6 +106,8 @@ export interface PipelineConnection {
 * Azure Data Factory Connection Config
 *
 * Stitch Connection
 *
 * Snowplow Pipeline Connection Config
 */
export interface ConfigObject {
    /**
@@ -168,6 +170,8 @@ export interface ConfigObject {
    username?: string;
    /**
     * Fivetran API Secret.
     *
     * API Key for Snowplow Console API
     */
    apiKey?: string;
    /**
@@ -330,6 +334,26 @@ export interface ConfigObject {
     * The azure subscription identifier.
     */
    subscription_id?: string;
    /**
     * Cloud provider where Snowplow is deployed
     */
    cloudProvider?: CloudProvider;
    /**
     * Path to pipeline configuration files for Community deployment
     */
    configPath?: string;
    /**
     * Snowplow Console URL for BDP deployment
     */
    consoleUrl?: string;
    /**
     * Snowplow deployment type (BDP for managed or Community for self-hosted)
     */
    deployment?: SnowplowDeployment;
    /**
     * Snowplow BDP Organization ID
     */
    organizationId?: string;
    [property: string]: any;
}
@@ -395,6 +419,15 @@ export interface AWSCredentials {
    profileName?: string;
}

/**
 * Cloud provider where Snowplow is deployed
 */
export enum CloudProvider {
    Aws = "AWS",
    Azure = "Azure",
    Gcp = "GCP",
}

/**
 * Azure Cloud Credentials
 *
@@ -824,6 +857,16 @@ export enum MssqlType {
    Mssql = "Mssql",
}

/**
 * Snowplow deployment type (BDP for managed or Community for self-hosted)
 *
 * Snowplow deployment type
 */
export enum SnowplowDeployment {
    Bdp = "BDP",
    Community = "Community",
}

/**
 * We support username/password or client certificate authentication
 *
@@ -956,6 +999,7 @@ export enum PipelineServiceType {
    Matillion = "Matillion",
    Nifi = "Nifi",
    OpenLineage = "OpenLineage",
    Snowplow = "Snowplow",
    Spark = "Spark",
    Spline = "Spline",
    Ssis = "SSIS",
@@ -2439,6 +2439,8 @@ export interface ServiceConnection {
     *
     * Stitch Connection
     *
     * Snowplow Pipeline Connection Config
     *
     * MlFlow Connection Config
     *
     * Sklearn Connection Config
@@ -2917,6 +2919,8 @@ export interface ConfigObject {
     * API key to authenticate with the SAP ERP APIs.
     *
     * Fivetran API Secret.
     *
     * API Key for Snowplow Console API
     */
    apiKey?: string;
    /**
@@ -3385,6 +3389,8 @@ export interface ConfigObject {
    verify?: string;
    /**
     * Salesforce Organization ID is the unique identifier for your Salesforce identity
     *
     * Snowplow BDP Organization ID
     */
    organizationId?: string;
    /**
@@ -3907,6 +3913,22 @@ export interface ConfigObject {
     * The azure subscription identifier.
     */
    subscription_id?: string;
    /**
     * Cloud provider where Snowplow is deployed
     */
    cloudProvider?: CloudProvider;
    /**
     * Path to pipeline configuration files for Community deployment
     */
    configPath?: string;
    /**
     * Snowplow Console URL for BDP deployment
     */
    consoleUrl?: string;
    /**
     * Snowplow deployment type (BDP for managed or Community for self-hosted)
     */
    deployment?: SnowplowDeployment;
    /**
     * Regex to only fetch MlModels with names matching the pattern.
     */
@@ -4576,6 +4598,15 @@ export interface ConsumerConfigSSLClass {
    sslKey?: string;
}

/**
 * Cloud provider where Snowplow is deployed
 */
export enum CloudProvider {
    Aws = "AWS",
    Azure = "Azure",
    Gcp = "GCP",
}

/**
 * Available sources to fetch the metadata.
 *
@@ -5180,6 +5211,16 @@ export enum MssqlType {
    Mssql = "Mssql",
}

/**
 * Snowplow deployment type (BDP for managed or Community for self-hosted)
 *
 * Snowplow deployment type
 */
export enum SnowplowDeployment {
    Bdp = "BDP",
    Community = "Community",
}

/**
 * Configuration for Sink Component in the OpenMetadata Ingestion Framework.
 */
@@ -5952,6 +5993,7 @@ export enum PurpleType {
    SingleStore = "SingleStore",
    Sklearn = "Sklearn",
    Snowflake = "Snowflake",
    Snowplow = "Snowplow",
    Spark = "Spark",
    Spline = "Spline",
    Ssas = "SSAS",
@@ -248,6 +248,8 @@ export interface TestServiceConnectionConnection {
     *
     * Stitch Connection
     *
     * Snowplow Pipeline Connection Config
     *
     * MlFlow Connection Config
     *
     * Sklearn Connection Config
@@ -934,6 +936,8 @@ export interface ConfigObject {
    verify?: string;
    /**
     * Salesforce Organization ID is the unique identifier for your Salesforce identity
     *
     * Snowplow BDP Organization ID
     */
    organizationId?: string;
    /**
@@ -1138,6 +1142,8 @@ export interface ConfigObject {
     * Admin role for full metadata extraction.
     *
     * Fivetran API Secret.
     *
     * API Key for Snowplow Console API
     */
    apiKey?: string;
    /**
@@ -1542,6 +1548,22 @@ export interface ConfigObject {
     * The azure subscription identifier.
     */
    subscription_id?: string;
    /**
     * Cloud provider where Snowplow is deployed
     */
    cloudProvider?: CloudProvider;
    /**
     * Path to pipeline configuration files for Community deployment
     */
    configPath?: string;
    /**
     * Snowplow Console URL for BDP deployment
     */
    consoleUrl?: string;
    /**
     * Snowplow deployment type (BDP for managed or Community for self-hosted)
     */
    deployment?: SnowplowDeployment;
    /**
     * Regex to only fetch MlModels with names matching the pattern.
     */
@@ -2403,6 +2425,15 @@ export interface ConsumerConfigSSLClass {
    sslKey?: string;
}

/**
 * Cloud provider where Snowplow is deployed
 */
export enum CloudProvider {
    Aws = "AWS",
    Azure = "Azure",
    Gcp = "GCP",
}

/**
 * Available sources to fetch the metadata.
 *
@@ -3196,6 +3227,16 @@ export enum MssqlType {
    Mssql = "Mssql",
}

/**
 * Snowplow deployment type (BDP for managed or Community for self-hosted)
 *
 * Snowplow deployment type
 */
export enum SnowplowDeployment {
    Bdp = "BDP",
    Community = "Community",
}

/**
 * Configuration for Sink Component in the OpenMetadata Ingestion Framework.
 */
@@ -3973,6 +4014,7 @@ export enum ConfigType {
    SingleStore = "SingleStore",
    Sklearn = "Sklearn",
    Snowflake = "Snowflake",
    Snowplow = "Snowplow",
    Spark = "Spark",
    Spline = "Spline",
    Ssas = "SSAS",
@@ -784,6 +784,8 @@ export interface RequestConnection {
     *
     * Stitch Connection
     *
     * Snowplow Pipeline Connection Config
     *
     * MlFlow Connection Config
     *
     * Sklearn Connection Config
@@ -1470,6 +1472,8 @@ export interface ConfigObject {
    verify?: string;
    /**
     * Salesforce Organization ID is the unique identifier for your Salesforce identity
     *
     * Snowplow BDP Organization ID
     */
    organizationId?: string;
    /**
@@ -1674,6 +1678,8 @@ export interface ConfigObject {
     * Admin role for full metadata extraction.
     *
     * Fivetran API Secret.
     *
     * API Key for Snowplow Console API
     */
    apiKey?: string;
    /**
@@ -2078,6 +2084,22 @@ export interface ConfigObject {
     * The azure subscription identifier.
     */
    subscription_id?: string;
    /**
     * Cloud provider where Snowplow is deployed
     */
    cloudProvider?: CloudProvider;
    /**
     * Path to pipeline configuration files for Community deployment
     */
    configPath?: string;
    /**
     * Snowplow Console URL for BDP deployment
     */
    consoleUrl?: string;
    /**
     * Snowplow deployment type (BDP for managed or Community for self-hosted)
     */
    deployment?: SnowplowDeployment;
    /**
     * Regex to only fetch MlModels with names matching the pattern.
     */
@@ -2846,6 +2868,15 @@ export interface QlikCertificatesBy {
    [property: string]: any;
}

/**
 * Cloud provider where Snowplow is deployed
 */
export enum CloudProvider {
    Aws = "AWS",
    Azure = "Azure",
    Gcp = "GCP",
}

/**
 * Available sources to fetch the metadata.
 *
@@ -3628,6 +3659,16 @@ export enum MssqlType {
    Mssql = "Mssql",
}

/**
 * Snowplow deployment type (BDP for managed or Community for self-hosted)
 *
 * Snowplow deployment type
 */
export enum SnowplowDeployment {
    Bdp = "BDP",
    Community = "Community",
}

/**
 * Configuration for Sink Component in the OpenMetadata Ingestion Framework.
 */
@@ -4360,6 +4401,7 @@ export enum ConfigType {
    SingleStore = "SingleStore",
    Sklearn = "Sklearn",
    Snowflake = "Snowflake",
    Snowplow = "Snowplow",
    Spark = "Spark",
    Spline = "Spline",
    Ssas = "SSAS",
@@ -683,6 +683,7 @@ export enum PipelineServiceType {
    Matillion = "Matillion",
    Nifi = "Nifi",
    OpenLineage = "OpenLineage",
    Snowplow = "Snowplow",
    Spark = "Spark",
    Spline = "Spline",
    Ssis = "SSIS",
@@ -0,0 +1,94 @@
/*
 * Copyright 2025 Collate.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * Snowplow Pipeline Connection Config
 */
export interface Snowplowconnection {
    /**
     * API Key for Snowplow Console API
     */
    apiKey?: string;
    /**
     * Cloud provider where Snowplow is deployed
     */
    cloudProvider?: CloudProvider;
    /**
     * Path to pipeline configuration files for Community deployment
     */
    configPath?: string;
    /**
     * Snowplow Console URL for BDP deployment
     */
    consoleUrl?: string;
    /**
     * Snowplow deployment type (BDP for managed or Community for self-hosted)
     */
    deployment: SnowplowDeployment;
    /**
     * Snowplow BDP Organization ID
     */
    organizationId?: string;
    /**
     * Regex exclude pipelines.
     */
    pipelineFilterPattern?: FilterPattern;
    supportsMetadataExtraction?: boolean;
    /**
     * Service Type
     */
    type: SnowplowType;
}

/**
 * Cloud provider where Snowplow is deployed
 */
export enum CloudProvider {
    Aws = "AWS",
    Azure = "Azure",
    Gcp = "GCP",
}

/**
 * Snowplow deployment type (BDP for managed or Community for self-hosted)
 *
 * Snowplow deployment type
 */
export enum SnowplowDeployment {
    Bdp = "BDP",
    Community = "Community",
}

/**
 * Regex exclude pipelines.
 *
 * Regex to only fetch entities that matches the pattern.
 */
export interface FilterPattern {
    /**
     * List of strings/regex patterns to match and exclude only database entities that match.
     */
    excludes?: string[];
    /**
     * List of strings/regex patterns to match and include only database entities that match.
     */
    includes?: string[];
}

/**
 * Service Type
 *
 * Service type.
 */
export enum SnowplowType {
    Snowplow = "Snowplow",
}
@@ -245,6 +245,8 @@ export interface ServiceConnectionClass {
     *
     * Stitch Connection
     *
     * Snowplow Pipeline Connection Config
     *
     * MlFlow Connection Config
     *
     * Sklearn Connection Config
@@ -723,6 +725,8 @@ export interface ConfigObject {
     * API key to authenticate with the SAP ERP APIs.
     *
     * Fivetran API Secret.
     *
     * API Key for Snowplow Console API
     */
    apiKey?: string;
    /**
@@ -1191,6 +1195,8 @@ export interface ConfigObject {
    verify?: string;
    /**
     * Salesforce Organization ID is the unique identifier for your Salesforce identity
     *
     * Snowplow BDP Organization ID
     */
    organizationId?: string;
    /**
@@ -1713,6 +1719,22 @@ export interface ConfigObject {
     * The azure subscription identifier.
     */
    subscription_id?: string;
    /**
     * Cloud provider where Snowplow is deployed
     */
    cloudProvider?: CloudProvider;
    /**
     * Path to pipeline configuration files for Community deployment
     */
    configPath?: string;
    /**
     * Snowplow Console URL for BDP deployment
     */
    consoleUrl?: string;
    /**
     * Snowplow deployment type (BDP for managed or Community for self-hosted)
     */
    deployment?: SnowplowDeployment;
    /**
     * Regex to only fetch MlModels with names matching the pattern.
     */
@@ -2426,6 +2448,15 @@ export interface ConsumerConfigSSLClass {
    sslKey?: string;
}

/**
 * Cloud provider where Snowplow is deployed
 */
export enum CloudProvider {
    Aws = "AWS",
    Azure = "Azure",
    Gcp = "GCP",
}

/**
 * Available sources to fetch the metadata.
 *
@@ -3223,6 +3254,16 @@ export enum MssqlType {
    Mssql = "Mssql",
}

/**
 * Snowplow deployment type (BDP for managed or Community for self-hosted)
 *
 * Snowplow deployment type
 */
export enum SnowplowDeployment {
    Bdp = "BDP",
    Community = "Community",
}

/**
 * Configuration for Sink Component in the OpenMetadata Ingestion Framework.
 */
@@ -4007,6 +4048,7 @@ export enum ConfigType {
    SingleStore = "SingleStore",
    Sklearn = "Sklearn",
    Snowflake = "Snowflake",
    Snowplow = "Snowplow",
    Spark = "Spark",
    Spline = "Spline",
    Ssas = "SSAS",
@@ -2950,6 +2950,8 @@ export interface ServiceConnection {
     *
     * Stitch Connection
     *
     * Snowplow Pipeline Connection Config
     *
     * MlFlow Connection Config
     *
     * Sklearn Connection Config
@@ -3428,6 +3430,8 @@ export interface ConfigObject {
     * API key to authenticate with the SAP ERP APIs.
     *
     * Fivetran API Secret.
     *
     * API Key for Snowplow Console API
     */
    apiKey?: string;
    /**
@@ -3896,6 +3900,8 @@ export interface ConfigObject {
    verify?: string;
    /**
     * Salesforce Organization ID is the unique identifier for your Salesforce identity
     *
     * Snowplow BDP Organization ID
     */
    organizationId?: string;
    /**
@@ -4418,6 +4424,22 @@ export interface ConfigObject {
     * The azure subscription identifier.
     */
    subscription_id?: string;
    /**
     * Cloud provider where Snowplow is deployed
     */
    cloudProvider?: CloudProvider;
    /**
     * Path to pipeline configuration files for Community deployment
     */
    configPath?: string;
    /**
     * Snowplow Console URL for BDP deployment
     */
    consoleUrl?: string;
    /**
     * Snowplow deployment type (BDP for managed or Community for self-hosted)
     */
    deployment?: SnowplowDeployment;
    /**
     * Regex to only fetch MlModels with names matching the pattern.
     */
@@ -5038,6 +5060,15 @@ export interface QlikCertificatesBy {
    [property: string]: any;
}

/**
 * Cloud provider where Snowplow is deployed
 */
export enum CloudProvider {
    Aws = "AWS",
    Azure = "Azure",
    Gcp = "GCP",
}

/**
 * Available sources to fetch the metadata.
 *
@@ -5631,6 +5662,16 @@ export enum MssqlType {
    Mssql = "Mssql",
}

/**
 * Snowplow deployment type (BDP for managed or Community for self-hosted)
 *
 * Snowplow deployment type
 */
export enum SnowplowDeployment {
    Bdp = "BDP",
    Community = "Community",
}

/**
 * Configuration for Sink Component in the OpenMetadata Ingestion Framework.
 */
@@ -6360,6 +6401,7 @@ export enum PurpleType {
    SingleStore = "SingleStore",
    Sklearn = "Sklearn",
    Snowflake = "Snowflake",
    Snowplow = "Snowplow",
    Spark = "Spark",
    Spline = "Spline",
    Ssas = "SSAS",
@@ -216,6 +216,8 @@ export interface PipelineConnection {
 * Azure Data Factory Connection Config
 *
 * Stitch Connection
 *
 * Snowplow Pipeline Connection Config
 */
export interface ConfigObject {
    /**
@@ -278,6 +280,8 @@ export interface ConfigObject {
    username?: string;
    /**
     * Fivetran API Secret.
     *
     * API Key for Snowplow Console API
     */
    apiKey?: string;
    /**
@@ -440,6 +444,26 @@ export interface ConfigObject {
     * The azure subscription identifier.
     */
    subscription_id?: string;
    /**
     * Cloud provider where Snowplow is deployed
     */
    cloudProvider?: CloudProvider;
    /**
     * Path to pipeline configuration files for Community deployment
     */
    configPath?: string;
    /**
     * Snowplow Console URL for BDP deployment
     */
    consoleUrl?: string;
    /**
     * Snowplow deployment type (BDP for managed or Community for self-hosted)
     */
    deployment?: SnowplowDeployment;
    /**
     * Snowplow BDP Organization ID
     */
    organizationId?: string;
    [property: string]: any;
}
@@ -505,6 +529,15 @@ export interface AWSCredentials {
    profileName?: string;
}

/**
 * Cloud provider where Snowplow is deployed
 */
export enum CloudProvider {
    Aws = "AWS",
    Azure = "Azure",
    Gcp = "GCP",
}

/**
 * Azure Cloud Credentials
 *
@@ -934,6 +967,16 @@ export enum MssqlType {
    Mssql = "Mssql",
}

/**
 * Snowplow deployment type (BDP for managed or Community for self-hosted)
 *
 * Snowplow deployment type
 */
export enum SnowplowDeployment {
    Bdp = "BDP",
    Community = "Community",
}

/**
 * We support username/password or client certificate authentication
 *
@@ -1068,6 +1111,7 @@ export enum PipelineServiceType {
    Matillion = "Matillion",
    Nifi = "Nifi",
    OpenLineage = "OpenLineage",
    Snowplow = "Snowplow",
    Spark = "Spark",
    Spline = "Spline",
    Ssis = "SSIS",
@@ -289,6 +289,8 @@ export interface ServiceConnection {
     *
     * Stitch Connection
     *
     * Snowplow Pipeline Connection Config
     *
     * MlFlow Connection Config
     *
     * Sklearn Connection Config
@@ -767,6 +769,8 @@ export interface ConfigObject {
     * API key to authenticate with the SAP ERP APIs.
     *
     * Fivetran API Secret.
     *
     * API Key for Snowplow Console API
     */
    apiKey?: string;
    /**
@@ -1235,6 +1239,8 @@ export interface ConfigObject {
    verify?: string;
    /**
     * Salesforce Organization ID is the unique identifier for your Salesforce identity
     *
     * Snowplow BDP Organization ID
     */
    organizationId?: string;
    /**
@@ -1757,6 +1763,22 @@ export interface ConfigObject {
     * The azure subscription identifier.
     */
    subscription_id?: string;
    /**
     * Cloud provider where Snowplow is deployed
     */
    cloudProvider?: CloudProvider;
    /**
     * Path to pipeline configuration files for Community deployment
     */
    configPath?: string;
    /**
     * Snowplow Console URL for BDP deployment
     */
    consoleUrl?: string;
    /**
     * Snowplow deployment type (BDP for managed or Community for self-hosted)
     */
    deployment?: SnowplowDeployment;
    /**
     * Regex to only fetch MlModels with names matching the pattern.
     */
@@ -2470,6 +2492,15 @@ export interface ConsumerConfigSSLClass {
    sslKey?: string;
}

/**
 * Cloud provider where Snowplow is deployed
 */
export enum CloudProvider {
    Aws = "AWS",
    Azure = "Azure",
    Gcp = "GCP",
}

/**
 * Available sources to fetch the metadata.
 *
@@ -3267,6 +3298,16 @@ export enum MssqlType {
    Mssql = "Mssql",
}

/**
 * Snowplow deployment type (BDP for managed or Community for self-hosted)
 *
 * Snowplow deployment type
 */
export enum SnowplowDeployment {
    Bdp = "BDP",
    Community = "Community",
}

/**
 * Configuration for Sink Component in the OpenMetadata Ingestion Framework.
 */
@@ -4051,6 +4092,7 @@ export enum ConfigType {
    SingleStore = "SingleStore",
    Sklearn = "Sklearn",
    Snowflake = "Snowflake",
    Snowplow = "Snowplow",
    Spark = "Spark",
    Spline = "Spline",
    Ssas = "SSAS",
@@ -325,6 +325,8 @@ export interface ServiceConnection {
     *
     * Stitch Connection
     *
     * Snowplow Pipeline Connection Config
     *
     * MlFlow Connection Config
     *
     * Sklearn Connection Config
@@ -803,6 +805,8 @@ export interface ConfigObject {
     * API key to authenticate with the SAP ERP APIs.
     *
     * Fivetran API Secret.
     *
     * API Key for Snowplow Console API
     */
    apiKey?: string;
    /**
@@ -1271,6 +1275,8 @@ export interface ConfigObject {
    verify?: string;
    /**
     * Salesforce Organization ID is the unique identifier for your Salesforce identity
     *
     * Snowplow BDP Organization ID
     */
    organizationId?: string;
    /**
@@ -1793,6 +1799,22 @@ export interface ConfigObject {
     * The azure subscription identifier.
     */
    subscription_id?: string;
    /**
     * Cloud provider where Snowplow is deployed
     */
    cloudProvider?: CloudProvider;
    /**
     * Path to pipeline configuration files for Community deployment
     */
    configPath?: string;
    /**
     * Snowplow Console URL for BDP deployment
     */
    consoleUrl?: string;
    /**
     * Snowplow deployment type (BDP for managed or Community for self-hosted)
     */
    deployment?: SnowplowDeployment;
    /**
     * Regex to only fetch MlModels with names matching the pattern.
     */
@@ -2527,6 +2549,15 @@ export interface ConsumerConfigSSLClass {
    sslKey?: string;
}

/**
 * Cloud provider where Snowplow is deployed
 */
export enum CloudProvider {
    Aws = "AWS",
    Azure = "Azure",
    Gcp = "GCP",
}

/**
 * Available sources to fetch the metadata.
 *
@@ -3324,6 +3355,16 @@ export enum MssqlType {
    Mssql = "Mssql",
}

/**
 * Snowplow deployment type (BDP for managed or Community for self-hosted)
 *
 * Snowplow deployment type
 */
export enum SnowplowDeployment {
    Bdp = "BDP",
    Community = "Community",
}

/**
 * Configuration for Sink Component in the OpenMetadata Ingestion Framework.
 */
@@ -4108,6 +4149,7 @@ export enum PurpleType {
    SingleStore = "SingleStore",
    Sklearn = "Sklearn",
    Snowflake = "Snowflake",
    Snowplow = "Snowplow",
    Spark = "Spark",
    Spline = "Spline",
    Ssas = "SSAS",
@@ -125,6 +125,7 @@ import {
  SecurityServiceTypeSmallCaseType,
  StorageServiceTypeSmallCaseType,
} from '../enums/service.enum';
import { DriveServiceType } from '../generated/api/services/createDriveService';
import { ConfigObject } from '../generated/entity/automations/testServiceConnection';
import { WorkflowType } from '../generated/entity/automations/workflow';
import { StorageServiceType } from '../generated/entity/data/container';
@@ -172,6 +173,8 @@ class ServiceUtilClassBase {
    PipelineServiceType.Wherescape,
    SecurityServiceType.Ranger,
    DatabaseServiceType.Epic,
    DriveServiceType.GoogleDrive,
    PipelineServiceType.Snowplow,
  ];

  DatabaseServiceTypeSmallCase = this.convertEnumToLowerCase<