Mirror of https://github.com/open-metadata/OpenMetadata.git, synced 2025-12-04 19:44:58 +00:00
#12678 - Add SAP Hana Backend Converter

* Format

Co-authored-by: Mohit Yadav <105265192+mohityadav766@users.noreply.github.com>

parent 00677a1e1b · commit 9329b5a488
@@ -46,25 +46,26 @@ public final class ClassConverterFactory {
           Map.entry(BigQueryConnection.class, new BigQueryConnectionClassConverter()),
           Map.entry(BigTableConnection.class, new BigTableConnectionClassConverter()),
           Map.entry(DatalakeConnection.class, new DatalakeConnectionClassConverter()),
-          Map.entry(MysqlConnection.class, new MysqlConnectionClassConverter()),
-          Map.entry(HiveConnection.class, new HiveConnectionClassConverter()),
-          Map.entry(TrinoConnection.class, new TrinoConnectionClassConverter()),
-          Map.entry(PostgresConnection.class, new PostgresConnectionClassConverter()),
           Map.entry(DbtGCSConfig.class, new DbtGCSConfigClassConverter()),
           Map.entry(DbtPipeline.class, new DbtPipelineClassConverter()),
+          Map.entry(ElasticSearchConnection.class, new ElasticSearchConnectionClassConverter()),
           Map.entry(GCSConfig.class, new GCPConfigClassConverter()),
           Map.entry(GCPCredentials.class, new GcpCredentialsClassConverter()),
           Map.entry(GCSConnection.class, new GcpConnectionClassConverter()),
-          Map.entry(ElasticSearchConnection.class, new ElasticSearchConnectionClassConverter()),
+          Map.entry(HiveConnection.class, new HiveConnectionClassConverter()),
+          Map.entry(IcebergConnection.class, new IcebergConnectionClassConverter()),
+          Map.entry(IcebergFileSystem.class, new IcebergFileSystemClassConverter()),
           Map.entry(LookerConnection.class, new LookerConnectionClassConverter()),
-          Map.entry(SSOAuthMechanism.class, new SSOAuthMechanismClassConverter()),
+          Map.entry(MysqlConnection.class, new MysqlConnectionClassConverter()),
+          Map.entry(PostgresConnection.class, new PostgresConnectionClassConverter()),
+          Map.entry(SapHanaConnection.class, new SapHanaConnectionClassConverter()),
           Map.entry(SupersetConnection.class, new SupersetConnectionClassConverter()),
+          Map.entry(SSOAuthMechanism.class, new SSOAuthMechanismClassConverter()),
           Map.entry(TableauConnection.class, new TableauConnectionClassConverter()),
           Map.entry(
               TestServiceConnectionRequest.class,
               new TestServiceConnectionRequestClassConverter()),
-          Map.entry(IcebergConnection.class, new IcebergConnectionClassConverter()),
-          Map.entry(IcebergFileSystem.class, new IcebergFileSystemClassConverter()),
+          Map.entry(TrinoConnection.class, new TrinoConnectionClassConverter()),
           Map.entry(Workflow.class, new WorkflowClassConverter()));
   }

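For context on the hunk above: the factory maps connection classes to their ClassConverter implementations, and registering SapHanaConnection here is what routes SAP Hana configs through the new converter when they are rebuilt from JSON. As a minimal, self-contained sketch of that registry pattern (the names below are hypothetical, not the actual OpenMetadata classes), a class-keyed lookup with a pass-through fallback could look like this:

import java.util.Map;

public class ConverterRegistrySketch {
  interface Converter {
    Object convert(Object raw);
  }

  // Pass-through used when no specialized converter is registered for a class.
  static final Converter DEFAULT = raw -> raw;

  // One entry per class that needs custom handling (SapHanaConnection in the hunk above).
  static final Map<Class<?>, Converter> CONVERTERS =
      Map.of(String.class, raw -> raw.toString().trim());

  static Converter getConverter(Class<?> clazz) {
    return CONVERTERS.getOrDefault(clazz, DEFAULT);
  }

  public static void main(String[] args) {
    System.out.println(getConverter(String.class).convert("  hana  ")); // "hana"
    System.out.println(getConverter(Integer.class).convert(42));        // 42, via the fallback
  }
}

Keying the dispatch on Class<?> keeps it explicit, so forgetting to register a new connection type is easy to spot in review.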
@@ -0,0 +1,42 @@
+/*
+ *  Copyright 2021 Collate
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.openmetadata.service.secrets.converter;
+
+import java.util.List;
+import org.openmetadata.schema.services.connections.database.SapHanaConnection;
+import org.openmetadata.schema.services.connections.database.sapHana.SapHanaHDBConnection;
+import org.openmetadata.schema.services.connections.database.sapHana.SapHanaSQLConnection;
+import org.openmetadata.service.util.JsonUtils;
+
+/** Converter class to get a `SapHanaConnection` object. */
+public class SapHanaConnectionClassConverter extends ClassConverter {
+
+  private static final List<Class<?>> CONNECTION_CLASSES =
+      List.of(SapHanaSQLConnection.class, SapHanaHDBConnection.class);
+
+  public SapHanaConnectionClassConverter() {
+    super(SapHanaConnection.class);
+  }
+
+  @Override
+  public Object convert(Object object) {
+    SapHanaConnection sapHanaConnection =
+        (SapHanaConnection) JsonUtils.convertValue(object, this.clazz);
+
+    tryToConvertOrFail(sapHanaConnection.getConnection(), CONNECTION_CLASSES)
+        .ifPresent(sapHanaConnection::setConnection);
+
+    return sapHanaConnection;
+  }
+}
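The converter first rebuilds the outer SapHanaConnection via JsonUtils.convertValue, then tryToConvertOrFail turns the untyped connection field into either SapHanaSQLConnection or SapHanaHDBConnection. Below is a rough, self-contained sketch of that "try each candidate class in order" idea, assuming Jackson is on the classpath; the helper and POJO names are hypothetical and this is not the ClassConverter base-class code:

import java.util.List;
import java.util.Map;
import java.util.Optional;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

public class CandidateConversionSketch {
  // Simplified stand-ins for the generated SapHanaSQLConnection / SapHanaHDBConnection classes.
  static class SqlConn {
    public String hostPort;
    public String username;
    public String password;
  }

  static class HdbConn {
    public String userKey;
  }

  static final ObjectMapper MAPPER =
      new ObjectMapper().enable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);

  // Try each candidate class in order; strict binding rejects payloads with unknown fields.
  static Optional<Object> tryCandidates(Object raw, List<Class<?>> candidates) {
    for (Class<?> candidate : candidates) {
      try {
        Object converted = MAPPER.convertValue(raw, candidate);
        return Optional.of(converted);
      } catch (IllegalArgumentException notThisShape) {
        // Fall through and try the next candidate.
      }
    }
    return Optional.empty();
  }

  public static void main(String[] args) {
    Object hdbPayload = Map.of("userKey", "MY_HDB_KEY");
    Object match = tryCandidates(hdbPayload, List.of(SqlConn.class, HdbConn.class)).orElseThrow();
    System.out.println(match.getClass().getSimpleName()); // HdbConn: "userKey" is unknown to SqlConn
  }
}

Strict binding is what lets the first matching candidate win unambiguously, which lines up with both new sapHana schemas setting "additionalProperties": false.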
@@ -0,0 +1,16 @@
+{
+  "$id": "https://open-metadata.org/schema/entity/services/connections/database/sapHana/sapHanaHDBConnection.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "SapHanaHDBConnection",
+  "description": "Sap Hana Database HDB User Store Connection Config",
+  "type": "object",
+  "javaType": "org.openmetadata.schema.services.connections.database.sapHana.SapHanaHDBConnection",
+  "properties": {
+    "userKey": {
+      "title": "User Key",
+      "description": "HDB Store User Key generated from the command `hdbuserstore SET <KEY> <host:port> <USERNAME> <PASSWORD>`",
+      "type": "string"
+    }
+  },
+  "additionalProperties": false
+}
@@ -0,0 +1,38 @@
+{
+  "$id": "https://open-metadata.org/schema/entity/services/connections/database/sapHana/sapHanaConnection.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "SapHanaSQLConnection",
+  "description": "Sap Hana Database SQL Connection Config",
+  "type": "object",
+  "javaType": "org.openmetadata.schema.services.connections.database.sapHana.SapHanaSQLConnection",
+  "properties": {
+    "hostPort": {
+      "title": "Host and Port",
+      "description": "Host and port of the Hana service.",
+      "type": "string"
+    },
+    "username": {
+      "title": "Username",
+      "description": "Username to connect to Hana. This user should have privileges to read all the metadata.",
+      "type": "string"
+    },
+    "password": {
+      "title": "Password",
+      "description": "Password to connect to Hana.",
+      "type": "string",
+      "format": "password"
+    },
+    "databaseSchema": {
+      "title": "Database Schema",
+      "description": "Database Schema of the data source. This is an optional parameter, if you would like to restrict the metadata reading to a single schema. When left blank, OpenMetadata Ingestion attempts to scan all the schemas.",
+      "type": "string"
+    },
+    "database": {
+      "title": "Database",
+      "description": "Database of the data source.",
+      "type": "string"
+    }
+  },
+  "additionalProperties": false,
+  "required": ["username", "password", "hostPort"]
+}
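Taken together, the two new schemas describe the payloads a SapHanaConnection can carry in its connection field. A sketch of what instances of each variant look like, with assumed example values that are not taken from OpenMetadata docs or tests:

import java.util.List;
import java.util.Map;

public class SapHanaPayloadsSketch {
  public static void main(String[] args) {
    // SQL variant: host, credentials, and optional database/schema scoping.
    Map<String, Object> sqlConnection =
        Map.of(
            "hostPort", "hana.example.com:30015",   // assumed host and port, for illustration only
            "username", "metadata_reader",
            "password", "secret",
            "database", "HXE",
            "databaseSchema", "ANALYTICS");

    // HDB User Store variant: only the key registered via `hdbuserstore SET ...`.
    Map<String, Object> hdbUserStoreConnection = Map.of("userKey", "MY_HDB_KEY");

    // Minimal check mirroring the SQL schema's "required" list.
    for (String field : List.of("username", "password", "hostPort")) {
      if (!sqlConnection.containsKey(field)) {
        throw new IllegalStateException("missing required field: " + field);
      }
    }
    System.out.println("sqlConnection: " + sqlConnection);
    System.out.println("hdbUserStoreConnection: " + hdbUserStoreConnection);
  }
}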
@@ -17,56 +17,8 @@
       "type": "string",
       "enum": ["hana"],
       "default": "hana"
-    },
-    "sqlConnection": {
-      "title": "SQL Connection",
-      "description": "Options to connect to SAP Hana by passing the database information",
-      "type": "object",
-      "properties": {
-        "hostPort": {
-          "title": "Host and Port",
-          "description": "Host and port of the Hana service.",
-          "type": "string"
-        },
-        "username": {
-          "title": "Username",
-          "description": "Username to connect to Hana. This user should have privileges to read all the metadata.",
-          "type": "string"
-        },
-        "password": {
-          "title": "Password",
-          "description": "Password to connect to Hana.",
-          "type": "string",
-          "format": "password"
-        },
-        "databaseSchema": {
-          "title": "Database Schema",
-          "description": "Database Schema of the data source. This is an optional parameter, if you would like to restrict the metadata reading to a single schema. When left blank, OpenMetadata Ingestion attempts to scan all the schemas.",
-          "type": "string"
-        },
-        "database": {
-          "title": "Database",
-          "description": "Database of the data source.",
-          "type": "string"
-        }
-      },
-      "additionalProperties": false,
-      "required": ["username", "password", "hostPort"]
-    },
-    "hdbUserStoreConnection": {
-      "title": "HDB User Store Connection",
-      "description": "Use HDB User Store to avoid entering connection-related information manually. This store needs to be present on the client running the ingestion.",
-      "type": "object",
-      "properties": {
-        "userKey": {
-          "title": "User Key",
-          "description": "HDB Store User Key generated from the command `hdbuserstore SET <KEY> <host:port> <USERNAME> <PASSWORD>`",
-          "type": "string"
-        }
-      },
-      "additionalProperties": false
     }
   },
   "properties": {
     "type": {
       "title": "Service Type",
@@ -86,10 +38,10 @@
       "description": "Choose between Database connection or HDB User Store connection.",
       "oneOf": [
         {
-          "$ref": "#/definitions/sqlConnection"
+          "$ref": "sapHana/sapHanaSQLConnection.json"
         },
         {
-          "$ref": "#/definitions/hdbUserStoreConnection"
+          "$ref": "sapHana/sapHanaHDBConnection.json"
         }
       ]
     },
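The last hunk swaps the inline #/definitions references for external $refs, so each connection variant lives in its own schema file and gets its own generated javaType. A small illustrative sketch of how a relative $ref like these is typically resolved against the referring schema's location (hypothetical paths and helper, not the actual code-generator logic):

import java.nio.file.Path;

public class RefResolutionSketch {
  // Resolve a relative $ref against the directory of the schema file that contains it.
  static Path resolveRef(Path referringSchema, String ref) {
    return referringSchema.getParent().resolve(ref).normalize();
  }

  public static void main(String[] args) {
    Path parent = Path.of("connections/database/sapHanaConnection.json"); // assumed location
    System.out.println(resolveRef(parent, "sapHana/sapHanaSQLConnection.json"));
    // -> connections/database/sapHana/sapHanaSQLConnection.json
  }
}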