MINOR: Add new WorkflowType and converter (#23090)

* Add new WorkflowType and converter

* Update generated TypeScript types

---------

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
IceS2 2025-09-04 10:11:35 +02:00 committed by GitHub
parent 8dd69af269
commit ebc67a9f97
8 changed files with 162 additions and 9 deletions


@@ -17,6 +17,7 @@ import java.util.Map;
 import lombok.Getter;
 import org.openmetadata.schema.auth.SSOAuthMechanism;
 import org.openmetadata.schema.entity.automations.TestServiceConnectionRequest;
+import org.openmetadata.schema.entity.automations.TestSparkEngineConnectionRequest;
 import org.openmetadata.schema.entity.automations.Workflow;
 import org.openmetadata.schema.metadataIngestion.DbtPipeline;
 import org.openmetadata.schema.metadataIngestion.dbtconfig.DbtGCSConfig;
@ -93,17 +94,19 @@ public final class ClassConverterFactory {
Map.entry( Map.entry(
TestServiceConnectionRequest.class, TestServiceConnectionRequest.class,
new TestServiceConnectionRequestClassConverter()), new TestServiceConnectionRequestClassConverter()),
Map.entry(
TestSparkEngineConnectionRequest.class,
new TestSparkEngineConnectionRequestClassConverter()),
Map.entry(TrinoConnection.class, new TrinoConnectionClassConverter()), Map.entry(TrinoConnection.class, new TrinoConnectionClassConverter()),
Map.entry(Workflow.class, new WorkflowClassConverter()), Map.entry(Workflow.class, new WorkflowClassConverter()),
Map.entry(CockroachConnection.class, new CockroachConnectionClassConverter()), Map.entry(CockroachConnection.class, new CockroachConnectionClassConverter()),
Map.entry(NifiConnection.class, new NifiConnectionClassConverter()), Map.entry(NifiConnection.class, new NifiConnectionClassConverter()),
Map.entry(MatillionConnection.class, new MatillionConnectionClassConverter()), Map.entry(MatillionConnection.class, new MatillionConnectionClassConverter()),
Map.entry(VertexAIConnection.class, new VertexAIConnectionClassConverter()), Map.entry(VertexAIConnection.class, new VertexAIConnectionClassConverter()),
Map.entry(WherescapeConnection.class, new WherescapeConnectionClassConverter()), Map.entry(RangerConnection.class, new RangerConnectionClassConverter()),
Map.entry(RangerConnection.class, new RangerConnectionClassConverter())); Map.entry(CassandraConnection.class, new CassandraConnectionClassConverter()),
Map.entry(Workflow.class, new WorkflowClassConverter()); Map.entry(SSISConnection.class, new SsisConnectionClassConverter()),
Map.entry(CassandraConnection.class, new CassandraConnectionClassConverter()); Map.entry(WherescapeConnection.class, new WherescapeConnectionClassConverter()));
Map.entry(SSISConnection.class, new SsisConnectionClassConverter());
} }
public static ClassConverter getConverter(Class<?> clazz) { public static ClassConverter getConverter(Class<?> clazz) {
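
A minimal sketch, not part of this commit, of what the new factory entry is meant to enable: ClassConverterFactory.getConverter should now resolve the dedicated converter for TestSparkEngineConnectionRequest instead of whatever generic fallback the factory uses otherwise. The class and method names come from the diff above; the ClassConverterFactory package and the fallback behavior are assumptions.

// Hypothetical check, assuming ClassConverterFactory lives in the same package as the
// converters added in this commit (org.openmetadata.service.secrets.converter).
import org.openmetadata.schema.entity.automations.TestSparkEngineConnectionRequest;
import org.openmetadata.service.secrets.converter.ClassConverter;
import org.openmetadata.service.secrets.converter.ClassConverterFactory;
import org.openmetadata.service.secrets.converter.TestSparkEngineConnectionRequestClassConverter;

public class ConverterRegistrationSketch {
  public static void main(String[] args) {
    ClassConverter converter =
        ClassConverterFactory.getConverter(TestSparkEngineConnectionRequest.class);
    // With the new Map.entry registered, the factory is expected to hand back the
    // dedicated converter rather than a generic one.
    System.out.println(converter instanceof TestSparkEngineConnectionRequestClassConverter);
  }
}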


@@ -0,0 +1,30 @@
/*
 *  Copyright 2021 Collate
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *  http://www.apache.org/licenses/LICENSE-2.0
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package org.openmetadata.service.secrets.converter;

import org.openmetadata.schema.entity.automations.TestSparkEngineConnectionRequest;
import org.openmetadata.schema.utils.JsonUtils;

/** Converter class to get a `TestSparkEngineConnectionRequest` object. */
public class TestSparkEngineConnectionRequestClassConverter extends ClassConverter {

  public TestSparkEngineConnectionRequestClassConverter() {
    super(TestSparkEngineConnectionRequest.class);
  }

  @Override
  public Object convert(Object object) {
    return JsonUtils.convertValue(object, this.clazz);
  }
}
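
The converter only delegates to JsonUtils.convertValue, so a rough usage sketch looks like the following. The map keys mirror the sparkEngine and ingestionRunner properties defined in the JSON schema added further down in this commit; the concrete values (remote URL, runner name) are invented for illustration.

// Illustrative only: values are made up, but the call shape mirrors how ClassConverter
// implementations are exercised in this module.
import java.util.Map;
import org.openmetadata.schema.entity.automations.TestSparkEngineConnectionRequest;
import org.openmetadata.service.secrets.converter.TestSparkEngineConnectionRequestClassConverter;

public class SparkEngineRequestConversionSketch {
  public static void main(String[] args) {
    Map<String, Object> raw =
        Map.of(
            "sparkEngine", Map.of("type", "Spark", "remote", "sc://spark-connect:15002"),
            "ingestionRunner", "example-runner");

    // convert() returns Object, so the caller casts to the target type.
    TestSparkEngineConnectionRequest request =
        (TestSparkEngineConnectionRequest)
            new TestSparkEngineConnectionRequestClassConverter().convert(raw);
  }
}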


@@ -15,6 +15,7 @@ package org.openmetadata.service.secrets.converter;
 import java.util.List;
 import org.openmetadata.schema.entity.automations.TestServiceConnectionRequest;
+import org.openmetadata.schema.entity.automations.TestSparkEngineConnectionRequest;
 import org.openmetadata.schema.entity.automations.Workflow;
 import org.openmetadata.schema.metadataIngestion.ReverseIngestionPipeline;
 import org.openmetadata.schema.services.connections.metadata.OpenMetadataConnection;
@@ -33,7 +34,10 @@ public class WorkflowClassConverter extends ClassConverter {
     tryToConvertOrFail(
             workflow.getRequest(),
-            List.of(TestServiceConnectionRequest.class, ReverseIngestionPipeline.class))
+            List.of(
+                TestServiceConnectionRequest.class,
+                ReverseIngestionPipeline.class,
+                TestSparkEngineConnectionRequest.class))
         .ifPresent(workflow::setRequest);

     if (workflow.getOpenMetadataServerConnection() != null) {
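
For context, a hedged sketch of the intended end-to-end effect on a Workflow object. Workflow's setRequest/getRequest, the converter class, and the candidate-class list come from this diff; the payload values and the expectation that tryToConvertOrFail settles on the new request type (its selection rules are not shown here) are assumptions.

// Sketch under assumptions: the raw request below should only satisfy the new
// testSparkEngineConnection schema, so tryToConvertOrFail is expected to map it onto
// TestSparkEngineConnectionRequest; exact matching and return semantics are not visible here.
import java.util.Map;
import org.openmetadata.schema.entity.automations.TestSparkEngineConnectionRequest;
import org.openmetadata.schema.entity.automations.Workflow;
import org.openmetadata.service.secrets.converter.WorkflowClassConverter;

public class WorkflowRequestConversionSketch {
  public static void main(String[] args) {
    Workflow workflow = new Workflow();
    workflow.setRequest(
        Map.of("sparkEngine", Map.of("type", "Spark", "remote", "sc://spark-connect:15002")));

    Workflow converted = (Workflow) new WorkflowClassConverter().convert(workflow);
    // Expected (inferred, not guaranteed): the request is now the typed POJO.
    System.out.println(converted.getRequest() instanceof TestSparkEngineConnectionRequest);
  }
}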


@@ -0,0 +1,19 @@
{
  "$id": "https://open-metadata.org/schema/entity/automations/testSparkEngineConnection.json",
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "TestSparkEngineConnectionRequest",
  "javaType": "org.openmetadata.schema.entity.automations.TestSparkEngineConnectionRequest",
  "description": "Test Spark Engine Connection to test user provided configuration is valid or not.",
  "type": "object",
  "properties": {
    "sparkEngine": {
      "description": "Spark Engine Configuration.",
      "$ref": "../../metadataIngestion/engine/sparkEngineConfig.json"
    },
    "ingestionRunner": {
      "description": "Optional value of the ingestion runner name responsible for running the test",
      "type": "string"
    }
  },
  "additionalProperties": false
}


@@ -15,7 +15,8 @@
       "type": "string",
       "enum": [
         "TEST_CONNECTION",
-        "REVERSE_INGESTION"
+        "REVERSE_INGESTION",
+        "TEST_SPARK_ENGINE_CONNECTION"
       ]
     },
     "workflowStatus": {
@@ -68,6 +69,9 @@
         },
         {
           "$ref": "../../metadataIngestion/reverseIngestionPipeline.json"
+        },
+        {
+          "$ref": "testSparkEngineConnection.json"
         }
       ]
     },
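
To make the two schema changes concrete, here is an illustrative payload for the new workflow type, mapped onto the generated Workflow POJO with JsonUtils.convertValue, the same helper the converters above use. The field names follow workflow.json and testSparkEngineConnection.json; the values are examples only, and it is assumed the generated WorkflowType enum deserializes from its string value, as generated enums usually do.

// Example payload for workflowType TEST_SPARK_ENGINE_CONNECTION; names and values are
// illustrative, not taken from this commit.
import java.util.Map;
import org.openmetadata.schema.entity.automations.Workflow;
import org.openmetadata.schema.utils.JsonUtils;

public class SparkEngineWorkflowPayloadSketch {
  public static void main(String[] args) {
    Map<String, Object> payload =
        Map.of(
            "name", "test-spark-engine-connection",
            "workflowType", "TEST_SPARK_ENGINE_CONNECTION",
            "request",
                Map.of(
                    "sparkEngine",
                        Map.of("type", "Spark", "remote", "sc://spark-connect:15002"),
                    "ingestionRunner", "example-runner"));

    Workflow workflow = (Workflow) JsonUtils.convertValue(payload, Workflow.class);
  }
}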


@@ -4460,4 +4460,5 @@ export enum WorkflowStatus {
 export enum WorkflowType {
     ReverseIngestion = "REVERSE_INGESTION",
     TestConnection = "TEST_CONNECTION",
+    TestSparkEngineConnection = "TEST_SPARK_ENGINE_CONNECTION",
 }


@@ -0,0 +1,55 @@
/*
 *  Copyright 2025 Collate.
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *  http://www.apache.org/licenses/LICENSE-2.0
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

/**
 * Test Spark Engine Connection to test user provided configuration is valid or not.
 */
export interface TestSparkEngineConnection {
    /**
     * Optional value of the ingestion runner name responsible for running the test
     */
    ingestionRunner?: string;
    /**
     * Spark Engine Configuration.
     */
    sparkEngine?: SparkEngineConfiguration;
}

/**
 * Spark Engine Configuration.
 *
 * This schema defines the configuration for a Spark Engine runner.
 */
export interface SparkEngineConfiguration {
    config?: Config;
    /**
     * Spark Connect Remote URL.
     */
    remote: string;
    type: Type;
}

export interface Config {
    /**
     * Additional Spark configuration properties as key-value pairs.
     */
    extraConfig?: { [key: string]: any };
    /**
     * Temporary path to store the data.
     */
    tempPath?: string;
    [property: string]: any;
}

export enum Type {
    Spark = "Spark",
}


@@ -534,6 +534,8 @@ export enum VerifySSL {
  * Test Service Connection to test user provided configuration is valid or not.
  *
  * Apply a set of operations on a service
+ *
+ * Test Spark Engine Connection to test user provided configuration is valid or not.
  */
 export interface TestServiceConnectionRequest {
     /**
@@ -574,6 +576,10 @@ export interface TestServiceConnectionRequest {
      * Pipeline type
      */
     type?: ReverseIngestionType;
+    /**
+     * Spark Engine Configuration.
+     */
+    sparkEngine?: SparkEngineConfiguration;
 }

 /**
@@ -4450,7 +4456,7 @@ export interface Operation {
     /**
      * Type of operation to perform
      */
-    type: Type;
+    type: OperationType;
 }

 /**
@@ -4580,7 +4586,7 @@ export interface Style {
 /**
  * Type of operation to perform
  */
-export enum Type {
+export enum OperationType {
     UpdateDescription = "UPDATE_DESCRIPTION",
     UpdateOwner = "UPDATE_OWNER",
     UpdateTags = "UPDATE_TAGS",
@@ -4605,6 +4611,36 @@ export enum ServiceType {
     Storage = "Storage",
 }
+
+/**
+ * Spark Engine Configuration.
+ *
+ * This schema defines the configuration for a Spark Engine runner.
+ */
+export interface SparkEngineConfiguration {
+    config?: Config;
+    /**
+     * Spark Connect Remote URL.
+     */
+    remote: string;
+    type: SparkEngineType;
+}
+
+export interface Config {
+    /**
+     * Additional Spark configuration properties as key-value pairs.
+     */
+    extraConfig?: { [key: string]: any };
+    /**
+     * Temporary path to store the data.
+     */
+    tempPath?: string;
+    [property: string]: any;
+}
+
+export enum SparkEngineType {
+    Spark = "Spark",
+}

 /**
  * Pipeline type
  *
@@ -4729,4 +4765,5 @@ export enum WorkflowStatus {
 export enum WorkflowType {
     ReverseIngestion = "REVERSE_INGESTION",
     TestConnection = "TEST_CONNECTION",
+    TestSparkEngineConnection = "TEST_SPARK_ENGINE_CONNECTION",
 }