Provide better errors on DataInsights Pipeline (#17315)

IceS2 authored on 2024-08-06 17:02:03 +02:00; committed by GitHub
parent f0fd643bd8
commit 0a8ea56c99
14 changed files with 114 additions and 96 deletions

View File: DataInsightsApp.java

@@ -6,6 +6,7 @@ import static org.openmetadata.service.workflows.searchIndex.ReindexingUtil.getT
 import es.org.elasticsearch.client.RestClient;
 import java.io.IOException;
+import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import lombok.Getter;
@@ -119,9 +120,37 @@ public class DataInsightsApp extends AbstractNativeApplication {
         backfill = Optional.empty();
       }
-      processWebAnalytics(jobExecutionContext);
-      processCostAnalysis(jobExecutionContext);
-      processDataAssets(jobExecutionContext);
+      WorkflowStats webAnalyticsStats = processWebAnalytics();
+      updateJobStatsWithWorkflowStats(webAnalyticsStats);
+
+      WorkflowStats costAnalysisStats = processCostAnalysis();
+      updateJobStatsWithWorkflowStats(costAnalysisStats);
+
+      WorkflowStats dataAssetsStats = processDataAssets();
+      updateJobStatsWithWorkflowStats(dataAssetsStats);
+
+      if (webAnalyticsStats.hasFailed()
+          || costAnalysisStats.hasFailed()
+          || dataAssetsStats.hasFailed()) {
+        String errorMessage = "Errors Found:\n";
+
+        for (WorkflowStats stats : List.of(webAnalyticsStats, costAnalysisStats, dataAssetsStats)) {
+          if (stats.hasFailed()) {
+            errorMessage = String.format("%s\n %s\n", errorMessage, stats.getName());
+            for (String failure : stats.getFailures()) {
+              errorMessage = String.format("%s - %s\n", errorMessage, failure);
+            }
+          }
+        }
+
+        IndexingError indexingError =
+            new IndexingError()
+                .withErrorSource(IndexingError.ErrorSource.JOB)
+                .withMessage(errorMessage);
+
+        LOG.error(indexingError.getMessage());
+        jobData.setStatus(EventPublisherJob.Status.FAILED);
+        jobData.setFailure(indexingError);
+      }
+
       updateJobStatus();
     } catch (Exception ex) {
@@ -130,7 +159,7 @@ public class DataInsightsApp extends AbstractNativeApplication {
               .withErrorSource(IndexingError.ErrorSource.JOB)
               .withMessage(
                   String.format(
-                      "Reindexing Job Has Encountered an Exception. %n Job Data: %s, %n Stack : %s ",
+                      "Data Insights Job Has Encountered an Exception. %n Job Data: %s, %n Stack : %s ",
                       jobData.toString(), ExceptionUtils.getStackTrace(ex)));
       LOG.error(indexingError.getMessage());
       jobData.setStatus(EventPublisherJob.Status.FAILED);
@@ -144,101 +173,54 @@ public class DataInsightsApp extends AbstractNativeApplication {
     timestamp = TimestampUtils.getStartOfDayTimestamp(System.currentTimeMillis());
   }

-  private void processWebAnalytics(JobExecutionContext jobExecutionContext) {
+  private WorkflowStats processWebAnalytics() {
     WebAnalyticsWorkflow workflow = new WebAnalyticsWorkflow(timestamp, batchSize, backfill);
+    WorkflowStats workflowStats = workflow.getWorkflowStats();
     try {
       workflow.process();
     } catch (SearchIndexException ex) {
       jobData.setStatus(EventPublisherJob.Status.FAILED);
       jobData.setFailure(ex.getIndexingError());
-    } finally {
-      WorkflowStats workflowStats = workflow.getWorkflowStats();
-      for (Map.Entry<String, StepStats> entry : workflowStats.getWorkflowStepStats().entrySet()) {
-        String stepName = entry.getKey();
-        StepStats stats = entry.getValue();
-        updateStats(stepName, stats);
-      }
-      if (workflowStats.getFailures().isEmpty()) {
-        IndexingError indexingError =
-            new IndexingError()
-                .withErrorSource(IndexingError.ErrorSource.JOB)
-                .withMessage(
-                    String.format(
-                        "WebAnalytics Workflow Has encounter issues: %s",
-                        workflowStats.getFailures()));
-        jobData.setStatus(EventPublisherJob.Status.FAILED);
-        jobData.setFailure(indexingError);
-      }
-      sendUpdates(jobExecutionContext);
     }
+    return workflowStats;
   }

-  private void processCostAnalysis(JobExecutionContext jobExecutionContext) {
+  private WorkflowStats processCostAnalysis() {
+    // TODO: Actually implement Backfill
     CostAnalysisWorkflow workflow = new CostAnalysisWorkflow(timestamp, batchSize, backfill);
+    WorkflowStats workflowStats = workflow.getWorkflowStats();
     try {
       workflow.process();
     } catch (SearchIndexException ex) {
       jobData.setStatus(EventPublisherJob.Status.FAILED);
       jobData.setFailure(ex.getIndexingError());
-    } finally {
-      WorkflowStats workflowStats = workflow.getWorkflowStats();
-      for (Map.Entry<String, StepStats> entry : workflowStats.getWorkflowStepStats().entrySet()) {
-        String stepName = entry.getKey();
-        StepStats stats = entry.getValue();
-        updateStats(stepName, stats);
-      }
-      if (workflowStats.getFailures().isEmpty()) {
-        IndexingError indexingError =
-            new IndexingError()
-                .withErrorSource(IndexingError.ErrorSource.JOB)
-                .withMessage(
-                    String.format(
-                        "CostAnalysis Workflow Has encounter issues: %s",
-                        workflowStats.getFailures()));
-        jobData.setStatus(EventPublisherJob.Status.FAILED);
-        jobData.setFailure(indexingError);
-      }
-      sendUpdates(jobExecutionContext);
     }
+    return workflowStats;
   }

-  private void processDataAssets(JobExecutionContext jobExecutionContext) {
+  private WorkflowStats processDataAssets() {
     DataAssetsWorkflow workflow =
         new DataAssetsWorkflow(timestamp, batchSize, backfill, collectionDAO, searchRepository);
+    WorkflowStats workflowStats = workflow.getWorkflowStats();
     try {
       workflow.process();
     } catch (SearchIndexException ex) {
       jobData.setStatus(EventPublisherJob.Status.FAILED);
       jobData.setFailure(ex.getIndexingError());
-    } finally {
-      WorkflowStats workflowStats = workflow.getWorkflowStats();
-      for (Map.Entry<String, StepStats> entry : workflowStats.getWorkflowStepStats().entrySet()) {
-        String stepName = entry.getKey();
-        StepStats stats = entry.getValue();
-        updateStats(stepName, stats);
-      }
-      if (workflowStats.getFailures().isEmpty()) {
-        IndexingError indexingError =
-            new IndexingError()
-                .withErrorSource(IndexingError.ErrorSource.JOB)
-                .withMessage(
-                    String.format(
-                        "DataAssets Workflow Has encounter issues: %s",
-                        workflowStats.getFailures()));
-        jobData.setStatus(EventPublisherJob.Status.FAILED);
-        jobData.setFailure(indexingError);
-      }
-      sendUpdates(jobExecutionContext);
+    }
+
+    return workflowStats;
+  }
+
+  private void updateJobStatsWithWorkflowStats(WorkflowStats workflowStats) {
+    for (Map.Entry<String, StepStats> entry : workflowStats.getWorkflowStepStats().entrySet()) {
+      String stepName = entry.getKey();
+      StepStats stats = entry.getValue();
+      updateStats(stepName, stats);
     }
   }
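For reference, the aggregation introduced above can be exercised in isolation. The sketch below uses only the JDK, with a hypothetical FakeStats stand-in for WorkflowStats and made-up sample failure text; it reproduces the message-building loop from the new run() block, showing the shape of the single job-level error that now lists every failing workflow instead of each workflow silently overwriting the previous failure.

import java.util.List;

public class ErrorMessageSketch {
  // Hypothetical stand-in for WorkflowStats: a name plus the failure strings it collected.
  record FakeStats(String name, List<String> failures) {
    boolean hasFailed() {
      return !failures.isEmpty();
    }
  }

  public static void main(String[] args) {
    List<FakeStats> allStats =
        List.of(
            new FakeStats("WebAnalyticsWorkflow", List.of()),
            new FakeStats("CostAnalysisWorkflow", List.of("sample step failure text")));

    // Same accumulation pattern as the new run() block above.
    String errorMessage = "Errors Found:\n";
    for (FakeStats stats : allStats) {
      if (stats.hasFailed()) {
        errorMessage = String.format("%s\n %s\n", errorMessage, stats.name());
        for (String failure : stats.failures()) {
          errorMessage = String.format("%s - %s\n", errorMessage, failure);
        }
      }
    }
    System.out.print(errorMessage);
  }
}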

View File: WorkflowStats.java

@@ -10,10 +10,15 @@ import lombok.Getter;
 import org.openmetadata.schema.system.StepStats;

 public class WorkflowStats {
+  @Getter private final String name;
   @Getter private List<String> failures = new ArrayList<>();
   @Getter private StepStats workflowStats = new StepStats();
   @Getter private final Map<String, StepStats> workflowStepStats = new HashMap<>();

+  public WorkflowStats(String name) {
+    this.name = name;
+  }
+
   public void setWorkflowStatsTotalRecords(int totalRecords) {
     workflowStats.setTotalRecords(totalRecords);
   }
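Note that the run() changes above also rely on WorkflowStats.hasFailed() and on failures being recorded per step; neither appears in this hunk. A minimal sketch of how such helpers could look on top of the fields shown here (an assumption, not part of the commit):

import java.util.ArrayList;
import java.util.List;

// Sketch of the behaviour DataInsightsApp.run() relies on. hasFailed() and
// addFailure() are not shown in this hunk, so their bodies here are assumptions.
class WorkflowStatsSketch {
  private final String name;
  private final List<String> failures = new ArrayList<>();

  WorkflowStatsSketch(String name) {
    this.name = name;
  }

  public String getName() {
    return name;
  }

  public List<String> getFailures() {
    return failures;
  }

  // Assumed: a workflow step records a failure message as it happens.
  public void addFailure(String failure) {
    failures.add(failure);
  }

  // Assumed: a workflow "has failed" when any step recorded a failure.
  public boolean hasFailed() {
    return !failures.isEmpty();
  }
}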

View File: CostAnalysisWorkflow.java

@@ -64,7 +64,7 @@ public class CostAnalysisWorkflow {
   @Getter
   private AggregatedCostAnalysisReportDataProcessor aggregatedCostAnalysisReportDataProcessor;

-  @Getter private final WorkflowStats workflowStats = new WorkflowStats();
+  @Getter private final WorkflowStats workflowStats = new WorkflowStats("CostAnalysisWorkflow");

   public CostAnalysisWorkflow(
       Long timestamp, int batchSize, Optional<DataInsightsApp.Backfill> backfill) {
@@ -114,7 +114,7 @@ public class CostAnalysisWorkflow {
               new PaginatedEntitiesSource(Entity.TABLE, batchSize, List.of("*"), filter)
                   .withName(
                       String.format(
-                          "PaginatedEntitiesSource-%s", databaseService.getFullyQualifiedName())));
+                          "[CostAnalysisWorkflow] %s", databaseService.getFullyQualifiedName())));
       total +=
           ((TableRepository) Entity.getEntityRepository(Entity.TABLE))
               .getDao()
@@ -204,7 +204,8 @@ public class CostAnalysisWorkflow {
     contextData.put(REPORT_DATA_TYPE_KEY, ReportData.ReportDataType.RAW_COST_ANALYSIS_REPORT_DATA);
     CreateReportDataProcessor createReportdataProcessor =
         new CreateReportDataProcessor(
-            rawCostAnalysisReportDataList.size(), "RawCostAnalysisReportDataProcessor");
+            rawCostAnalysisReportDataList.size(),
+            "[CostAnalysisWorkflow] Raw Cost Analysis Report Data Processor");

     Optional<List<ReportData>> rawCostAnalysisReportData = Optional.empty();
@@ -226,7 +227,8 @@ public class CostAnalysisWorkflow {
     if (rawCostAnalysisReportData.isPresent()) {
       ReportDataSink reportDataSink =
           new ReportDataSink(
-              rawCostAnalysisReportData.get().size(), "RawCostAnalysisReportDataSink");
+              rawCostAnalysisReportData.get().size(),
+              "[CostAnalysisWorkflow] Raw Cost Analysis Report Data " + "Sink");
       try {
         reportDataSink.write(rawCostAnalysisReportData.get(), contextData);
       } catch (SearchIndexException ex) {
@@ -277,7 +279,7 @@ public class CostAnalysisWorkflow {
       CreateReportDataProcessor createReportdataProcessor =
           new CreateReportDataProcessor(
               aggregatedCostAnalysisReportDataList.get().size(),
-              "AggregatedCostAnalysisReportDataProcessor");
+              "[CostAnalysisWorkflow] Aggregated Cost Analysis Report Data Processor");

       Optional<List<ReportData>> aggregatedCostAnalysisReportData = Optional.empty();
       try {
@@ -300,7 +302,7 @@ public class CostAnalysisWorkflow {
       ReportDataSink reportDataSink =
           new ReportDataSink(
               aggregatedCostAnalysisReportData.get().size(),
-              "AggregatedCostAnalysisReportDataSink");
+              "[CostAnalysisWorkflow] Aggregated Cost Analysis Report Data Sink");
       try {
         reportDataSink.write(aggregatedCostAnalysisReportData.get(), contextData);
       } catch (SearchIndexException ex) {

View File: AggregatedCostAnalysisReportDataAggregator.java

@@ -22,7 +22,10 @@ public class AggregatedCostAnalysisReportDataAggregator
     implements Processor<
         List<AggregatedCostAnalysisReportData>,
         Map<String, Map<String, Map<String, CostAnalysisWorkflow.AggregatedCostAnalysisData>>>> {
-  @Getter private final String name = "AggregatedCostAnlysisReportDataAggregator";
+  @Getter
+  private final String name =
+      "[CostAnalysisWorkflow] Aggregated Cost Anlysis Report Data Aggregator";
   private final StepStats stats = new StepStats();

   public AggregatedCostAnalysisReportDataAggregator(int total) {
@@ -93,7 +96,9 @@ public class AggregatedCostAnalysisReportDataAggregator
               .withSubmittedCount(input.size())
               .withFailedCount(input.size())
               .withSuccessCount(0)
-              .withMessage("Aggregated Cost Analysis Aggregator Encounter Failure.")
+              .withMessage(
+                  String.format(
+                      "Aggregated Cost Analysis Aggregator Encounter Failure: %s", e.getMessage()))
               .withStackTrace(ExceptionUtils.exceptionStackTraceAsString(e));
       LOG.debug(
           "[AggregatedCostAnalysisAggregator] Failed. Details: {}", JsonUtils.pojoToJson(error));

View File: AggregatedCostAnalysisReportDataProcessor.java

@@ -271,7 +271,9 @@ public class AggregatedCostAnalysisReportDataProcessor
               .withSubmittedCount(input.size())
               .withFailedCount(input.size())
               .withSuccessCount(0)
-              .withMessage("Aggregated Cost Analysis Processor Encounter Failure.")
+              .withMessage(
+                  String.format(
+                      "Aggregated Cost Analysis Processor Encounter Failure: %s", e.getMessage()))
               .withStackTrace(ExceptionUtils.exceptionStackTraceAsString(e));
       LOG.debug(
           "[AggregatedCostAnalysisProcessor] Failed. Details: {}", JsonUtils.pojoToJson(error));

View File: DatabaseServiceTablesProcessor.java

@@ -80,7 +80,9 @@ public class DatabaseServiceTablesProcessor
               .withSubmittedCount(input.getData().size())
               .withFailedCount(input.getData().size())
               .withSuccessCount(0)
-              .withMessage("Database Service Tables Processor Encounter Failure.")
+              .withMessage(
+                  String.format(
+                      "Database Service Tables Processor Encounter Failure: %s", e.getMessage()))
               .withStackTrace(ExceptionUtils.exceptionStackTraceAsString(e));
       LOG.debug(
           "[DatabaseServiceTAblesProcessor] Failed. Details: {}", JsonUtils.pojoToJson(error));

View File: RawCostAnalysisReportDataProcessor.java

@@ -48,7 +48,9 @@ public class RawCostAnalysisReportDataProcessor
             .withSubmittedCount(input.size())
             .withFailedCount(input.size())
             .withSuccessCount(0)
-            .withMessage("Raw Cost Analysis Processor Encounter Failure.")
+            .withMessage(
+                String.format(
+                    "Raw Cost Analysis Processor Encounter Failure: %s", e.getMessage()))
             .withStackTrace(ExceptionUtils.exceptionStackTraceAsString(e));
     LOG.debug("[RawCostAnalysisProcessor] Failed. Details: {}", JsonUtils.pojoToJson(error));
     updateStats(0, input.size());

View File: DataAssetsWorkflow.java

@@ -67,7 +67,7 @@ public class DataAssetsWorkflow {
   private DataInsightsEntityEnricherProcessor entityEnricher;
   private Processor entityProcessor;
   private Sink searchIndexSink;
-  @Getter private final WorkflowStats workflowStats = new WorkflowStats();
+  @Getter private final WorkflowStats workflowStats = new WorkflowStats("DataAssetsWorkflow");

   public DataAssetsWorkflow(
       Long timestamp,
@@ -115,7 +115,7 @@ public class DataAssetsWorkflow {
           List<String> fields = List.of("*");
           PaginatedEntitiesSource source =
               new PaginatedEntitiesSource(entityType, batchSize, fields)
-                  .withName(String.format("PaginatedEntitiesSource-%s", entityType));
+                  .withName(String.format("[DataAssetsWorkflow] %s", entityType));
           sources.add(source);
         });

View File: DataInsightsEntityEnricherProcessor.java

@@ -64,7 +64,8 @@ public class DataInsightsEntityEnricherProcessor
               .withSubmittedCount(input.getData().size())
               .withFailedCount(input.getData().size())
               .withSuccessCount(0)
-              .withMessage("Entities Enricher Encountered Failure.")
+              .withMessage(
+                  String.format("Entities Enricher Encountered Failure: %s", e.getMessage()))
               .withStackTrace(ExceptionUtils.exceptionStackTraceAsString(e));
       LOG.debug(
           "[DataInsightsEntityEnricherProcessor] Failed. Details: {}", JsonUtils.pojoToJson(error));

View File: WebAnalyticsWorkflow.java

@@ -53,7 +53,7 @@ public class WebAnalyticsWorkflow {
       Long lastSession) {}
   ;

-  @Getter private final WorkflowStats workflowStats = new WorkflowStats();
+  @Getter private final WorkflowStats workflowStats = new WorkflowStats("WebAnalyticsWorkflow");
   public static final String USER_ACTIVITY_DATA_KEY = "userActivityData";
   public static final String USER_ACTIVITY_REPORT_DATA_KEY = "userActivityReportData";
   public static final String ENTITY_VIEW_REPORT_DATA_KEY = "entityViewReportData";
@@ -199,7 +199,8 @@ public class WebAnalyticsWorkflow {
         REPORT_DATA_TYPE_KEY, ReportData.ReportDataType.WEB_ANALYTIC_ENTITY_VIEW_REPORT_DATA);
     CreateReportDataProcessor createReportDataProcessor =
         new CreateReportDataProcessor(
-            entityViewReportData.values().size(), "EntityViewReportDataProcessor");
+            entityViewReportData.values().size(),
+            "[WebAnalyticsWorkflow] Entity View Report Data Processor");

     Optional<List<ReportData>> entityViewReportDataList = Optional.empty();
@@ -220,7 +221,9 @@ public class WebAnalyticsWorkflow {
     // Sink EntityView ReportData
     if (entityViewReportDataList.isPresent()) {
       ReportDataSink reportDataSink =
-          new ReportDataSink(entityViewReportDataList.get().size(), "EntityViewReportDataSink");
+          new ReportDataSink(
+              entityViewReportDataList.get().size(),
+              "[WebAnalyticsWorkflow] Entity View Report Data Sink");

       try {
         reportDataSink.write(entityViewReportDataList.get(), contextData);
@@ -262,7 +265,8 @@ public class WebAnalyticsWorkflow {
     CreateReportDataProcessor createReportdataProcessor =
         new CreateReportDataProcessor(
-            userActivityReportData.values().size(), "UserActivityReportDataProcessor");
+            userActivityReportData.values().size(),
+            "[WebAnalyticsWorkflow] User Activity Report Data Processor");

     Optional<List<ReportData>> userActivityReportDataList = Optional.empty();
     // Process UserActivity ReportData
@@ -284,7 +288,9 @@ public class WebAnalyticsWorkflow {
     if (userActivityReportDataList.isPresent()) {
       ReportDataSink reportDataSink =
-          new ReportDataSink(userActivityReportDataList.get().size(), "UserActivityReportDataSink");
+          new ReportDataSink(
+              userActivityReportDataList.get().size(),
+              "[WebAnalyticsWorkflow] User Activity Report Data Sink");

       try {
         reportDataSink.write(userActivityReportDataList.get(), contextData);
       } catch (SearchIndexException ex) {

View File: WebAnalyticsEntityViewProcessor.java

@@ -66,7 +66,9 @@ public class WebAnalyticsEntityViewProcessor
               .withSubmittedCount(input.getData().size())
               .withFailedCount(input.getData().size())
               .withSuccessCount(0)
-              .withMessage("WebAnalytics Entity View Processor Encounter Failure.")
+              .withMessage(
+                  String.format(
+                      "WebAnalytics Entity View Processor Encounter Failure: %s", e.getMessage()))
               .withStackTrace(ExceptionUtils.exceptionStackTraceAsString(e));
       LOG.debug(
           "[WebAnalyticsEntityViewProcessor] Failed. Details: {}", JsonUtils.pojoToJson(error));

View File: WebAnalyticsUserActivityAggregator.java

@@ -24,7 +24,7 @@ public class WebAnalyticsUserActivityAggregator
     implements Processor<
         Map<UUID, WebAnalyticUserActivityReportData>,
         Map<UUID, WebAnalyticsWorkflow.UserActivityData>> {
-  @Getter private final String name = "WebAnalyticsUserActivityAggregator";
+  @Getter private final String name = "[WebAnalyticsWorkflow] User Activity Aggregator";
   private final StepStats stats = new StepStats();

   public WebAnalyticsUserActivityAggregator(int total) {
@@ -48,7 +48,10 @@ public class WebAnalyticsUserActivityAggregator
             .withSubmittedCount(input.size())
             .withFailedCount(input.size())
             .withSuccessCount(0)
-            .withMessage("Web Analytics User Activity Aggregator Encounter Failure.")
+            .withMessage(
+                String.format(
+                    "Web Analytics User Activity Aggregator Encounter Failure: %s",
+                    e.getMessage()))
             .withStackTrace(ExceptionUtils.exceptionStackTraceAsString(e));
     LOG.debug(
         "[WebAnalyticsUserActivityAggregator] Failed. Details: {}", JsonUtils.pojoToJson(error));

View File: WebAnalyticsUserActivityProcessor.java

@@ -49,7 +49,9 @@ public class WebAnalyticsUserActivityProcessor
               .withSubmittedCount(input.getData().size())
               .withFailedCount(input.getData().size())
               .withSuccessCount(0)
-              .withMessage("WebAnalytics User Activity Processor Encounter Failure.")
+              .withMessage(
+                  String.format(
+                      "WebAnalytics User Activity Processor Encounter Failure: %s", e.getMessage()))
               .withStackTrace(ExceptionUtils.exceptionStackTraceAsString(e));
       LOG.debug(
           "[WebAnalyticsUserActivityProcessor] Failed. Details: {}", JsonUtils.pojoToJson(error));

View File: PaginatedWebAnalyticEventDataSource.java

@@ -15,6 +15,7 @@ import org.openmetadata.schema.analytics.type.WebAnalyticEventType;
 import org.openmetadata.schema.system.IndexingError;
 import org.openmetadata.schema.system.StepStats;
 import org.openmetadata.service.Entity;
+import org.openmetadata.service.apps.bundles.insights.utils.TimestampUtils;
 import org.openmetadata.service.exception.SearchIndexException;
 import org.openmetadata.service.jdbi3.WebAnalyticEventRepository;
 import org.openmetadata.service.util.RestUtil;
@@ -46,7 +47,10 @@ public class PaginatedWebAnalyticEventDataSource
     this.batchSize = batchSize;
     this.startTs = startTs;
     this.endTs = endTs;
-    this.name = String.format("PaginatedWebAnalyticEventDataSource-%s-%s", startTs, endTs);
+    this.name =
+        String.format(
+            "[WebAnalyticsWorkflow] Event Data Source %s",
+            TimestampUtils.timestampToString(startTs, "YYYY-MM-dd"));
     this.totalRecords = repository.listWebAnalyticEventDataCount(eventType, startTs, endTs, false);
     this.stats.withTotalRecords(totalRecords).withSuccessRecords(0).withFailedRecords(0);
   }
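A small JDK-only sketch of what the renamed source resolves to, assuming TimestampUtils.timestampToString() formats epoch milliseconds with the given pattern in UTC (its implementation is outside this diff; the timestamp below is a sample value):

import java.time.Instant;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

public class SourceNameSketch {
  public static void main(String[] args) {
    long startTs = 1722902400000L; // 2024-08-06T00:00:00Z, sample value
    String day =
        DateTimeFormatter.ofPattern("YYYY-MM-dd")
            .withZone(ZoneOffset.UTC)
            .format(Instant.ofEpochMilli(startTs));
    String name = String.format("[WebAnalyticsWorkflow] Event Data Source %s", day);
    System.out.println(name); // [WebAnalyticsWorkflow] Event Data Source 2024-08-06
  }
}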