Fix #3832 Service Connection: Add more test coverage and add support for Messaging and Dashboard services (#3835)

Sriharsha Chintalapani 2022-04-05 12:20:52 -07:00 committed by GitHub
parent b3480693e4
commit 2db3b9dd94
5 changed files with 187 additions and 17 deletions
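The diffs below add a `pipelines` field to DashboardService and MessagingService (resolved through the CONTAINS relationship to their ingestion pipelines), rename the remaining "Airflow Pipeline" wording in the ingestion-pipeline resource, and extend the dashboard/messaging test coverage. As a minimal, hypothetical sketch of how the new field surfaces to a client: the snippet assumes a local OpenMetadata server on the default port 8585, the /api/v1/services/dashboardServices/{id} route, and an existing service id passed as an argument; only the fields=pipelines parameter itself comes from the tests in this commit.

// Hypothetical check of the new "pipelines" field added in this commit.
// Assumptions (not part of the diff): local server on port 8585, the
// /api/v1/services/dashboardServices/{id} route, no auth header, and a
// valid dashboard service id supplied as the first program argument.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class PipelinesFieldCheck {
  public static void main(String[] args) throws Exception {
    String serviceId = args[0]; // id of an existing dashboard service
    HttpRequest request =
        HttpRequest.newBuilder()
            .uri(URI.create("http://localhost:8585/api/v1/services/dashboardServices/"
                + serviceId + "?fields=pipelines,owner"))
            .GET()
            .build();
    HttpResponse<String> response =
        HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
    // With this commit, the JSON body should carry a "pipelines" array of
    // ingestion-pipeline entity references, analogous to database services.
    System.out.println(response.body());
  }
}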

DashboardServiceRepository.java

@@ -18,6 +18,8 @@ import static org.openmetadata.catalog.Entity.FIELD_OWNER;
import com.fasterxml.jackson.core.JsonProcessingException;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import org.openmetadata.catalog.Entity;
import org.openmetadata.catalog.entity.services.DashboardService;
@@ -25,6 +27,8 @@ import org.openmetadata.catalog.resources.services.dashboard.DashboardServiceRes
import org.openmetadata.catalog.type.ChangeDescription;
import org.openmetadata.catalog.type.DashboardConnection;
import org.openmetadata.catalog.type.EntityReference;
import org.openmetadata.catalog.type.Include;
import org.openmetadata.catalog.type.Relationship;
import org.openmetadata.catalog.util.EntityInterface;
import org.openmetadata.catalog.util.EntityUtil.Fields;
@@ -45,6 +49,7 @@ public class DashboardServiceRepository extends EntityRepository<DashboardServic
@Override
public DashboardService setFields(DashboardService entity, Fields fields) throws IOException {
entity.setPipelines(fields.contains("pipelines") ? getIngestionPipelines(entity) : null);
entity.setOwner(fields.contains(FIELD_OWNER) ? getOwner(entity) : null);
return entity;
}
@@ -85,6 +90,19 @@ public class DashboardServiceRepository extends EntityRepository<DashboardServic
return new DashboardServiceUpdater(original, updated, operation);
}
private List<EntityReference> getIngestionPipelines(DashboardService service) throws IOException {
List<String> ingestionPipelineIds =
findTo(service.getId(), Entity.DASHBOARD_SERVICE, Relationship.CONTAINS, Entity.INGESTION_PIPELINE);
List<EntityReference> ingestionPipelines = new ArrayList<>();
for (String ingestionPipelineId : ingestionPipelineIds) {
ingestionPipelines.add(
daoCollection
.ingestionPipelineDAO()
.findEntityReferenceById(UUID.fromString(ingestionPipelineId), Include.ALL));
}
return ingestionPipelines;
}
public static class DashboardServiceEntityInterface extends EntityInterface<DashboardService> {
public DashboardServiceEntityInterface(DashboardService entity) {
super(Entity.DASHBOARD_SERVICE, entity);

MessagingServiceRepository.java

@@ -17,12 +17,16 @@ import static org.openmetadata.catalog.Entity.FIELD_OWNER;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import org.openmetadata.catalog.Entity;
import org.openmetadata.catalog.entity.services.MessagingService;
import org.openmetadata.catalog.resources.services.messaging.MessagingServiceResource;
import org.openmetadata.catalog.type.ChangeDescription;
import org.openmetadata.catalog.type.EntityReference;
import org.openmetadata.catalog.type.Include;
import org.openmetadata.catalog.type.Relationship;
import org.openmetadata.catalog.util.EntityInterface;
import org.openmetadata.catalog.util.EntityUtil.Fields;
@@ -43,6 +47,7 @@ public class MessagingServiceRepository extends EntityRepository<MessagingServic
@Override
public MessagingService setFields(MessagingService entity, Fields fields) throws IOException {
entity.setPipelines(fields.contains("pipelines") ? getIngestionPipelines(entity) : null);
entity.setOwner(fields.contains(FIELD_OWNER) ? getOwner(entity) : null);
return entity;
}
@@ -83,6 +88,19 @@ public class MessagingServiceRepository extends EntityRepository<MessagingServic
return new MessagingServiceUpdater(original, updated, operation);
}
private List<EntityReference> getIngestionPipelines(MessagingService service) throws IOException {
List<String> ingestionPipelineIds =
findTo(service.getId(), Entity.MESSAGING_SERVICE, Relationship.CONTAINS, Entity.INGESTION_PIPELINE);
List<EntityReference> ingestionPipelines = new ArrayList<>();
for (String ingestionPipelineId : ingestionPipelineIds) {
ingestionPipelines.add(
daoCollection
.ingestionPipelineDAO()
.findEntityReferenceById(UUID.fromString(ingestionPipelineId), Include.ALL));
}
return ingestionPipelines;
}
public static class MessagingServiceEntityInterface extends EntityInterface<MessagingService> {
public MessagingServiceEntityInterface(MessagingService entity) {
super(Entity.MESSAGING_SERVICE, entity);

IngestionPipelineResource.java

@@ -119,7 +119,7 @@ public class IngestionPipelineResource extends EntityResource<IngestionPipeline,
@GET
@Valid
@Operation(
summary = "List Airflow Pipelines for Metadata Operations",
summary = "List Ingestion Pipelines for Metadata Operations",
tags = "IngestionPipelines",
description =
"Get a list of Airflow Pipelines for Metadata Operations. Use `fields` parameter to get only necessary fields. "
@@ -291,7 +291,7 @@ public class IngestionPipelineResource extends EntityResource<IngestionPipeline,
responses = {
@ApiResponse(
responseCode = "200",
description = "The Airflow Pipeline",
description = "The Ingestion Pipeline",
content =
@Content(
mediaType = "application/json",

DatabaseServiceResourceTest.java

@@ -21,6 +21,7 @@ import static org.openmetadata.catalog.util.TestUtils.assertResponseContains;
import static org.openmetadata.catalog.util.TestUtils.getPrincipal;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.client.HttpResponseException;
@@ -30,10 +31,16 @@ import org.openmetadata.catalog.Entity;
import org.openmetadata.catalog.api.services.CreateDatabaseService;
import org.openmetadata.catalog.api.services.CreateDatabaseService.DatabaseServiceType;
import org.openmetadata.catalog.api.services.DatabaseConnection;
import org.openmetadata.catalog.api.services.ingestionPipelines.CreateIngestionPipeline;
import org.openmetadata.catalog.entity.services.DatabaseService;
import org.openmetadata.catalog.entity.services.ingestionPipelines.IngestionPipeline;
import org.openmetadata.catalog.jdbi3.DatabaseServiceRepository.DatabaseServiceEntityInterface;
import org.openmetadata.catalog.metadataIngestion.DatabaseServiceMetadataPipeline;
import org.openmetadata.catalog.metadataingestion.FilterPattern;
import org.openmetadata.catalog.metadataingestion.SourceConfig;
import org.openmetadata.catalog.resources.EntityResourceTest;
import org.openmetadata.catalog.resources.services.database.DatabaseServiceResource.DatabaseServiceList;
import org.openmetadata.catalog.resources.services.ingestionpipelines.IngestionPipelineResourceTest;
import org.openmetadata.catalog.services.connections.database.BigQueryConnection;
import org.openmetadata.catalog.services.connections.database.ConnectionArguments;
import org.openmetadata.catalog.services.connections.database.ConnectionOptions;
@@ -145,26 +152,25 @@ public class DatabaseServiceResourceTest extends EntityResourceTest<DatabaseServ
service = getEntity(service.getId(), ADMIN_AUTH_HEADERS);
validateDatabaseConnection(databaseConnection, service.getConnection(), service.getServiceType());
/*AirflowPipelineResourceTest airflowPipelineResourceTest = new AirflowPipelineResourceTest();
CreateAirflowPipeline createAirflowPipeline =
airflowPipelineResourceTest.createRequest(test).withService(serviceRef);
IngestionPipelineResourceTest ingestionPipelineResourceTest = new IngestionPipelineResourceTest();
CreateIngestionPipeline createIngestionPipeline =
ingestionPipelineResourceTest.createRequest(test).withService(serviceRef);
DatabaseServiceMetadataPipeline databaseServiceMetadataPipeline =
new DatabaseServiceMetadataPipeline()
.withMarkDeletedTables(true)
.withIncludeViews(true)
.withSchemaFilterPattern(new FilterPattern().withExcludes(asList("information_schema.*", "test.*")))
.withTableFilterPattern(new FilterPattern().withIncludes(asList("sales.*", "users.*")));
PipelineConfig pipelineConfig =
new PipelineConfig().withSchema(PipelineConfig.Schema.DATABASE_SERVICE_METADATA_PIPELINE);
createAirflowPipeline.withPipelineConfig(pipelineConfig);
AirflowPipeline airflowPipeline =
airflowPipelineResourceTest.createEntity(createAirflowPipeline, ADMIN_AUTH_HEADERS);
.withSchemaFilterPattern(new FilterPattern().withExcludes(List.of("information_schema.*", "test.*")))
.withTableFilterPattern(new FilterPattern().withIncludes(List.of("sales.*", "users.*")));
SourceConfig sourceConfig = new SourceConfig().withConfig(databaseServiceMetadataPipeline);
createIngestionPipeline.withSourceConfig(sourceConfig);
IngestionPipeline ingestionPipeline =
ingestionPipelineResourceTest.createEntity(createIngestionPipeline, ADMIN_AUTH_HEADERS);
DatabaseService updatedService = getEntity(service.getId(), "pipelines", ADMIN_AUTH_HEADERS);
assertEquals(1, updatedService.getPipelines().size());
EntityReference expectedPipeline = updatedService.getPipelines().get(0);
assertEquals(airflowPipeline.getId(), expectedPipeline.getId());
assertEquals(airflowPipeline.getFullyQualifiedName(), expectedPipeline.getName());*/
assertEquals(ingestionPipeline.getId(), expectedPipeline.getId());
assertEquals(ingestionPipeline.getFullyQualifiedName(), expectedPipeline.getName());
}
@Override

IngestionPipelineResourceTest.java

@@ -26,7 +26,6 @@ import static org.openmetadata.catalog.util.TestUtils.assertResponseContains;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@@ -53,8 +52,10 @@ import org.openmetadata.catalog.entity.services.ingestionPipelines.IngestionPipe
import org.openmetadata.catalog.entity.services.ingestionPipelines.PipelineType;
import org.openmetadata.catalog.jdbi3.DatabaseServiceRepository.DatabaseServiceEntityInterface;
import org.openmetadata.catalog.jdbi3.IngestionPipelineRepository;
import org.openmetadata.catalog.metadataIngestion.DashboardServiceMetadataPipeline;
import org.openmetadata.catalog.metadataIngestion.DatabaseServiceMetadataPipeline;
import org.openmetadata.catalog.metadataIngestion.DatabaseServiceQueryUsagePipeline;
import org.openmetadata.catalog.metadataIngestion.MessagingServiceMetadataPipeline;
import org.openmetadata.catalog.metadataingestion.FilterPattern;
import org.openmetadata.catalog.resources.EntityResourceTest;
import org.openmetadata.catalog.resources.services.DatabaseServiceResourceTest;
@@ -75,6 +76,8 @@ import org.openmetadata.catalog.util.TestUtils;
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
public class IngestionPipelineResourceTest extends EntityResourceTest<IngestionPipeline, CreateIngestionPipeline> {
public static SourceConfig DATABASE_METADATA_CONFIG;
public static SourceConfig DASHBOARD_METADATA_CONFIG;
public static SourceConfig MESSAGING_METADATA_CONFIG;
public static AirflowConfiguration AIRFLOW_CONFIG;
public static DatabaseServiceResourceTest DATABASE_SERVICE_RESOURCE_TEST;
@@ -94,9 +97,17 @@ public class IngestionPipelineResourceTest extends EntityResourceTest<IngestionP
new DatabaseServiceMetadataPipeline()
.withMarkDeletedTables(true)
.withIncludeViews(true)
.withSchemaFilterPattern(new FilterPattern().withExcludes(Arrays.asList("information_schema.*", "test.*")))
.withTableFilterPattern(new FilterPattern().withIncludes(Arrays.asList("sales.*", "users.*")));
.withSchemaFilterPattern(new FilterPattern().withExcludes(List.of("information_schema.*", "test.*")))
.withTableFilterPattern(new FilterPattern().withIncludes(List.of("sales.*", "users.*")));
DashboardServiceMetadataPipeline dashboardServiceMetadataPipeline =
new DashboardServiceMetadataPipeline()
.withDashboardFilterPattern(new FilterPattern().withIncludes(List.of("dashboard.*", "users.*")));
MessagingServiceMetadataPipeline messagingServiceMetadataPipeline =
new MessagingServiceMetadataPipeline()
.withTopicFilterPattern(new FilterPattern().withExcludes(List.of("orders.*")));
DATABASE_METADATA_CONFIG = new SourceConfig().withConfig(databaseServiceMetadataPipeline);
DASHBOARD_METADATA_CONFIG = new SourceConfig().withConfig(dashboardServiceMetadataPipeline);
MESSAGING_METADATA_CONFIG = new SourceConfig().withConfig(messagingServiceMetadataPipeline);
AIRFLOW_CONFIG = new AirflowConfiguration();
AIRFLOW_CONFIG.setApiEndpoint("http://localhost:8080");
AIRFLOW_CONFIG.setUsername("admin");
@@ -335,6 +346,113 @@ public class IngestionPipelineResourceTest extends EntityResourceTest<IngestionP
validateSourceConfig(updatedSourceConfig, updatedIngestion.getSource().getSourceConfig(), ingestion);
}
@Test
void put_IngestionPipelineForDashboardSourceUpdate_200(TestInfo test) throws IOException {
CreateIngestionPipeline request =
createRequest(test)
.withService(new EntityReference().withId(SUPERSET_REFERENCE.getId()).withType("dashboardService"))
.withDescription("description")
.withSourceConfig(DASHBOARD_METADATA_CONFIG)
.withAirflowConfig(new AirflowConfig().withScheduleInterval("5 * * * *").withStartDate("2021-11-21"));
createAndCheckEntity(request, ADMIN_AUTH_HEADERS);
Integer pipelineConcurrency = 110;
Date startDate = new DateTime("2021-11-13T20:20:39+00:00").toDate();
String expectedScheduleInterval = "7 * * * *";
// Updating description is ignored when backend already has description
IngestionPipeline ingestion =
updateIngestionPipeline(
request
.withSourceConfig(DASHBOARD_METADATA_CONFIG)
.withAirflowConfig(
new AirflowConfig()
.withConcurrency(pipelineConcurrency)
.withScheduleInterval(expectedScheduleInterval)
.withStartDate(startDate.toString())),
OK,
ADMIN_AUTH_HEADERS);
String expectedFQN = EntityUtil.getFQN(SUPERSET_REFERENCE.getName(), ingestion.getName());
assertEquals(startDate.toString(), ingestion.getAirflowConfig().getStartDate());
assertEquals(pipelineConcurrency, ingestion.getAirflowConfig().getConcurrency());
assertEquals(expectedFQN, ingestion.getFullyQualifiedName());
assertEquals(expectedScheduleInterval, ingestion.getAirflowConfig().getScheduleInterval());
ingestion = getEntity(ingestion.getId(), "owner", ADMIN_AUTH_HEADERS);
assertEquals(expectedScheduleInterval, ingestion.getAirflowConfig().getScheduleInterval());
DashboardServiceMetadataPipeline dashboardServiceMetadataPipeline =
new DashboardServiceMetadataPipeline()
.withDashboardFilterPattern(new FilterPattern().withIncludes(List.of("test1.*", "test2.*")));
SourceConfig updatedSourceConfig = new SourceConfig().withConfig(dashboardServiceMetadataPipeline);
IngestionPipeline updatedIngestion =
updateIngestionPipeline(
request
.withSourceConfig(updatedSourceConfig)
.withAirflowConfig(
new AirflowConfig()
.withConcurrency(pipelineConcurrency)
.withScheduleInterval(expectedScheduleInterval)
.withStartDate(startDate.toString())),
OK,
ADMIN_AUTH_HEADERS);
assertEquals(startDate.toString(), ingestion.getAirflowConfig().getStartDate());
assertEquals(pipelineConcurrency, ingestion.getAirflowConfig().getConcurrency());
assertEquals(expectedFQN, ingestion.getFullyQualifiedName());
assertEquals(expectedScheduleInterval, ingestion.getAirflowConfig().getScheduleInterval());
validateSourceConfig(updatedSourceConfig, updatedIngestion.getSource().getSourceConfig(), ingestion);
}
@Test
void put_IngestionPipelineForMessagingSourceUpdate_200(TestInfo test) throws IOException {
CreateIngestionPipeline request =
createRequest(test)
.withService(new EntityReference().withId(KAFKA_REFERENCE.getId()).withType("messagingService"))
.withDescription("description")
.withSourceConfig(MESSAGING_METADATA_CONFIG)
.withAirflowConfig(new AirflowConfig().withScheduleInterval("5 * * * *").withStartDate("2021-11-21"));
createAndCheckEntity(request, ADMIN_AUTH_HEADERS);
Integer pipelineConcurrency = 110;
Date startDate = new DateTime("2021-11-13T20:20:39+00:00").toDate();
String expectedScheduleInterval = "7 * * * *";
// Updating description is ignored when backend already has description
IngestionPipeline ingestion =
updateIngestionPipeline(
request
.withSourceConfig(MESSAGING_METADATA_CONFIG)
.withAirflowConfig(
new AirflowConfig()
.withConcurrency(pipelineConcurrency)
.withScheduleInterval(expectedScheduleInterval)
.withStartDate(startDate.toString())),
OK,
ADMIN_AUTH_HEADERS);
String expectedFQN = EntityUtil.getFQN(KAFKA_REFERENCE.getName(), ingestion.getName());
assertEquals(startDate.toString(), ingestion.getAirflowConfig().getStartDate());
assertEquals(pipelineConcurrency, ingestion.getAirflowConfig().getConcurrency());
assertEquals(expectedFQN, ingestion.getFullyQualifiedName());
assertEquals(expectedScheduleInterval, ingestion.getAirflowConfig().getScheduleInterval());
ingestion = getEntity(ingestion.getId(), "owner", ADMIN_AUTH_HEADERS);
assertEquals(expectedScheduleInterval, ingestion.getAirflowConfig().getScheduleInterval());
MessagingServiceMetadataPipeline messagingServiceMetadataPipeline =
new MessagingServiceMetadataPipeline()
.withTopicFilterPattern(new FilterPattern().withIncludes(List.of("topic1.*", "topic2.*")));
SourceConfig updatedSourceConfig = new SourceConfig().withConfig(messagingServiceMetadataPipeline);
IngestionPipeline updatedIngestion =
updateIngestionPipeline(
request
.withSourceConfig(updatedSourceConfig)
.withAirflowConfig(
new AirflowConfig()
.withConcurrency(pipelineConcurrency)
.withScheduleInterval(expectedScheduleInterval)
.withStartDate(startDate.toString())),
OK,
ADMIN_AUTH_HEADERS);
assertEquals(startDate.toString(), ingestion.getAirflowConfig().getStartDate());
assertEquals(pipelineConcurrency, ingestion.getAirflowConfig().getConcurrency());
assertEquals(expectedFQN, ingestion.getFullyQualifiedName());
assertEquals(expectedScheduleInterval, ingestion.getAirflowConfig().getScheduleInterval());
validateSourceConfig(updatedSourceConfig, updatedIngestion.getSource().getSourceConfig(), ingestion);
}
@Test
void post_AirflowWithDatabaseServiceMetadata_GeneratedIngestionPipelineConfig_200_ok(TestInfo test)
throws IOException {
@@ -532,6 +650,16 @@ public class IngestionPipelineResourceTest extends EntityResourceTest<IngestionP
DatabaseServiceQueryUsagePipeline updatedConfig =
JsonUtils.convertValue(updated.getConfig(), DatabaseServiceQueryUsagePipeline.class);
assertEquals(origConfig, updatedConfig);
} else if (serviceType.equals(Entity.DASHBOARD_SERVICE)) {
DashboardServiceMetadataPipeline origConfig = (DashboardServiceMetadataPipeline) orig.getConfig();
DashboardServiceMetadataPipeline updatedConfig =
JsonUtils.convertValue(updated.getConfig(), DashboardServiceMetadataPipeline.class);
assertEquals(origConfig, updatedConfig);
} else if (serviceType.equals(Entity.MESSAGING_SERVICE)) {
MessagingServiceMetadataPipeline origConfig = (MessagingServiceMetadataPipeline) orig.getConfig();
MessagingServiceMetadataPipeline updatedConfig =
JsonUtils.convertValue(updated.getConfig(), MessagingServiceMetadataPipeline.class);
assertEquals(origConfig, updatedConfig);
}
}
}