Mirror of https://github.com/open-metadata/OpenMetadata.git (synced 2025-09-09 09:02:12 +00:00)
commit ec6ff78b80 (parent c53c7b680d)
@@ -15,8 +15,8 @@ package org.openmetadata.service.exception;
 
 import java.io.IOException;
 
-public class ReaderException extends IOException {
-  public ReaderException(String msg, Throwable throwable) {
+public class SinkException extends IOException {
+  public SinkException(String msg, Throwable throwable) {
     super(msg, throwable);
   }
 }
@@ -15,8 +15,8 @@ package org.openmetadata.service.exception;
 
 import java.io.IOException;
 
-public class WriterException extends IOException {
-  public WriterException(String msg, Throwable throwable) {
+public class SourceException extends IOException {
+  public SourceException(String msg, Throwable throwable) {
     super(msg, throwable);
   }
 }
@@ -14,8 +14,8 @@
 package org.openmetadata.service.workflows.interfaces;
 
 import java.util.Map;
-import org.openmetadata.service.exception.WriterException;
+import org.openmetadata.service.exception.SinkException;
 
 public interface Sink<I, O> extends Stats {
-  O write(I data, Map<String, Object> contextData) throws WriterException;
+  O write(I data, Map<String, Object> contextData) throws SinkException;
 }
@@ -14,10 +14,10 @@
 package org.openmetadata.service.workflows.interfaces;
 
 import java.util.Map;
-import org.openmetadata.service.exception.ReaderException;
+import org.openmetadata.service.exception.SourceException;
 
 public interface Source<R> extends Stats {
-  R readNext(Map<String, Object> contextData) throws ReaderException;
+  R readNext(Map<String, Object> contextData) throws SourceException;
 
   void reset();
 }
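For orientation (not part of the commit): the renamed Source and Sink interfaces are consumed by SearchIndexWorkflow, changed further down, as a read-process-write loop. A minimal sketch of that loop, assuming the concrete classes this commit renames (PaginatedEntitiesSource, EsEntitiesProcessor, EsSearchIndexSink) and illustrative local variables (entityType, batchSize, fields, client):

    // Illustrative only; the real loop lives in SearchIndexWorkflow.entitiesReIndex().
    PaginatedEntitiesSource source = new PaginatedEntitiesSource(entityType, batchSize, fields);
    EsEntitiesProcessor processor = new EsEntitiesProcessor();
    EsSearchIndexSink sink = new EsSearchIndexSink(client);
    Map<String, Object> contextData = new HashMap<>();
    contextData.put(ReindexingUtil.ENTITY_TYPE_KEY, entityType);
    while (!source.isDone()) {
      try {
        // Source.readNext may throw SourceException (formerly ReaderException)
        ResultList<? extends EntityInterface> batch = source.readNext(null);
        // Processor.process may throw ProcessorException
        BulkRequest requests = processor.process(batch, contextData);
        // Sink.write may throw SinkException (formerly WriterException)
        BulkResponse response = sink.write(requests, contextData);
      } catch (SourceException | ProcessorException | SinkException e) {
        // SearchIndexWorkflow records these as sourceError / processorError / sinkError
      }
    }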
@@ -41,7 +41,7 @@ public class EsDataInsightProcessor implements Processor<ResultList<ReportData>,
   public BulkRequest process(ResultList<ReportData> input, Map<String, Object> contextData) throws ProcessorException {
     String entityType = (String) contextData.get(ENTITY_TYPE_KEY);
     if (CommonUtil.nullOrEmpty(entityType)) {
-      throw new IllegalArgumentException("[EsEntitiesProcessor] entityType cannot be null or empty.");
+      throw new IllegalArgumentException("[EsDataInsightProcessor] entityType cannot be null or empty.");
     }
 
     LOG.debug(
@@ -16,7 +16,6 @@ package org.openmetadata.service.workflows.searchIndex;
 import static org.openmetadata.service.workflows.searchIndex.ReindexingUtil.getSuccessFromBulkResponse;
 import static org.openmetadata.service.workflows.searchIndex.ReindexingUtil.getUpdatedStats;
 
-import java.io.IOException;
 import java.util.Map;
 import lombok.extern.slf4j.Slf4j;
 import org.elasticsearch.action.bulk.BulkRequest;
@@ -24,21 +23,21 @@ import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.RestHighLevelClient;
 import org.openmetadata.schema.system.StepStats;
-import org.openmetadata.service.exception.WriterException;
+import org.openmetadata.service.exception.SinkException;
 import org.openmetadata.service.workflows.interfaces.Sink;
 
 @Slf4j
-public class EsSearchIndexWriter implements Sink<BulkRequest, BulkResponse> {
+public class EsSearchIndexSink implements Sink<BulkRequest, BulkResponse> {
   private final StepStats stats = new StepStats();
   private final RestHighLevelClient client;
 
-  EsSearchIndexWriter(RestHighLevelClient client) {
+  EsSearchIndexSink(RestHighLevelClient client) {
     this.client = client;
   }
 
   @Override
-  public BulkResponse write(BulkRequest data, Map<String, Object> contextData) throws WriterException {
-    LOG.debug("[EsSearchIndexWriter] Processing a Batch of Size: {}", data.numberOfActions());
+  public BulkResponse write(BulkRequest data, Map<String, Object> contextData) throws SinkException {
+    LOG.debug("[EsSearchIndexSink] Processing a Batch of Size: {}", data.numberOfActions());
     try {
       BulkResponse response = client.bulk(data, RequestOptions.DEFAULT);
       int currentSuccess = getSuccessFromBulkResponse(response);
@@ -46,21 +45,21 @@ public class EsSearchIndexWriter implements Sink<BulkRequest, BulkResponse> {
 
       // Update Stats
       LOG.debug(
-          "[EsSearchIndexWriter] Batch Stats :- Submitted : {} Success: {} Failed: {}",
+          "[EsSearchIndexSink] Batch Stats :- Submitted : {} Success: {} Failed: {}",
           data.numberOfActions(),
           currentSuccess,
           currentFailed);
       updateStats(currentSuccess, currentFailed);
 
       return response;
-    } catch (IOException e) {
+    } catch (Exception e) {
       LOG.debug(
-          "[EsSearchIndexWriter] Batch Stats :- Submitted : {} Success: {} Failed: {}",
+          "[EsSearchIndexSink] Batch Stats :- Submitted : {} Success: {} Failed: {}",
           data.numberOfActions(),
           0,
           data.numberOfActions());
       updateStats(0, data.numberOfActions());
-      throw new WriterException("[EsSearchIndexWriter] Batch encountered Exception. Failing Completely", e);
+      throw new SinkException("[EsSearchIndexSink] Batch encountered Exception. Failing Completely", e);
     }
   }
 
@@ -22,14 +22,14 @@ import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
 import org.openmetadata.schema.analytics.ReportData;
 import org.openmetadata.schema.system.StepStats;
-import org.openmetadata.service.exception.ReaderException;
+import org.openmetadata.service.exception.SourceException;
 import org.openmetadata.service.jdbi3.CollectionDAO;
 import org.openmetadata.service.util.RestUtil;
 import org.openmetadata.service.util.ResultList;
 import org.openmetadata.service.workflows.interfaces.Source;
 
 @Slf4j
-public class PaginatedDataInsightReader implements Source<ResultList<ReportData>> {
+public class PaginatedDataInsightSource implements Source<ResultList<ReportData>> {
   private final CollectionDAO dao;
   @Getter private final String entityType;
   @Getter private final int batchSize;
@@ -37,14 +37,15 @@ public class PaginatedDataInsightReader implements Source<ResultList<ReportData>
   private String cursor = null;
   @Getter private boolean isDone = false;
 
-  public PaginatedDataInsightReader(CollectionDAO dao, String entityType, int batchSize) {
+  public PaginatedDataInsightSource(CollectionDAO dao, String entityType, int batchSize) {
     this.dao = dao;
     this.entityType = entityType;
     this.batchSize = batchSize;
+    stats.setTotalRecords(dao.entityExtensionTimeSeriesDao().listCount(entityType));
   }
 
   @Override
-  public ResultList<ReportData> readNext(Map<String, Object> contextData) throws ReaderException {
+  public ResultList<ReportData> readNext(Map<String, Object> contextData) throws SourceException {
     if (!isDone) {
       ResultList<ReportData> data = read(cursor);
       cursor = data.getPaging().getAfter();
@@ -63,7 +64,7 @@ public class PaginatedDataInsightReader implements Source<ResultList<ReportData>
     isDone = false;
   }
 
-  private ResultList<ReportData> read(String afterCursor) throws ReaderException {
+  private ResultList<ReportData> read(String afterCursor) throws SourceException {
     LOG.debug("[DataInsightReader] Fetching a Batch of Size: {} ", batchSize);
     ResultList<ReportData> result;
     try {
@@ -76,8 +77,13 @@ public class PaginatedDataInsightReader implements Source<ResultList<ReportData>
       updateStats(result.getData().size(), result.getErrors().size());
     } catch (Exception ex) {
       LOG.debug("[DataInsightReader] Batch Stats :- Submitted : {} Success: {} Failed: {}", batchSize, 0, batchSize);
+      if (stats.getTotalRecords() - stats.getProcessedRecords() <= batchSize) {
+        isDone = true;
+        updateStats(0, stats.getTotalRecords() - stats.getProcessedRecords());
+      } else {
         updateStats(0, batchSize);
-      throw new ReaderException("[EntitiesReader] Batch encountered Exception. Failing Completely.", ex);
+      }
+      throw new SourceException("[EntitiesReader] Batch encountered Exception. Failing Completely.", ex);
     }
 
     return result;
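The new branch changes how a failed batch is accounted before the exception is rethrown: when the records still outstanding would fit into a single batch, the source marks itself done and only the actual remainder is counted as failed, instead of always charging a full batchSize. For example (illustrative numbers), with totalRecords = 1000, processedRecords = 950 and batchSize = 100, the remainder is 50, so isDone becomes true and updateStats(0, 50) is recorded; with processedRecords = 400 the remainder exceeds the batch size and the else branch still records updateStats(0, 100).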
@@ -104,7 +110,7 @@ public class PaginatedDataInsightReader implements Source<ResultList<ReportData>
     for (CollectionDAO.ReportDataRow reportDataRow : reportDataRowList) {
       reportDataList.add(reportDataRow.getReportData());
     }
-    return new ResultList<>(reportDataList, beforeCursor, afterCursor, total);
+    return new ResultList<>(reportDataList, new ArrayList<>(), beforeCursor, afterCursor, total);
   }
 
   @Override
@@ -24,14 +24,14 @@ import org.openmetadata.schema.EntityInterface;
 import org.openmetadata.schema.system.StepStats;
 import org.openmetadata.schema.type.Include;
 import org.openmetadata.service.Entity;
-import org.openmetadata.service.exception.ReaderException;
+import org.openmetadata.service.exception.SourceException;
 import org.openmetadata.service.jdbi3.EntityRepository;
 import org.openmetadata.service.jdbi3.ListFilter;
 import org.openmetadata.service.util.ResultList;
 import org.openmetadata.service.workflows.interfaces.Source;
 
 @Slf4j
-public class PaginatedEntitiesReader implements Source<ResultList<? extends EntityInterface>> {
+public class PaginatedEntitiesSource implements Source<ResultList<? extends EntityInterface>> {
   @Getter private final int batchSize;
   @Getter private final String entityType;
   @Getter private final List<String> fields;
@@ -39,14 +39,15 @@ public class PaginatedEntitiesReader implements Source<ResultList<? extends Enti
   private String cursor = null;
   @Getter private boolean isDone = false;
 
-  PaginatedEntitiesReader(String entityType, int batchSize, List<String> fields) {
+  PaginatedEntitiesSource(String entityType, int batchSize, List<String> fields) {
     this.entityType = entityType;
     this.batchSize = batchSize;
     this.fields = fields;
+    this.stats.setTotalRecords(Entity.getEntityRepository(entityType).dao.listTotalCount());
   }
 
   @Override
-  public ResultList<? extends EntityInterface> readNext(Map<String, Object> contextData) throws ReaderException {
+  public ResultList<? extends EntityInterface> readNext(Map<String, Object> contextData) throws SourceException {
     if (!isDone) {
       ResultList<? extends EntityInterface> data = read(cursor);
       cursor = data.getPaging().getAfter();
@@ -59,10 +60,10 @@ public class PaginatedEntitiesReader implements Source<ResultList<? extends Enti
     }
   }
 
-  private ResultList<? extends EntityInterface> read(String cursor) throws ReaderException {
-    LOG.debug("[EntitiesReader] Fetching a Batch of Size: {} ", batchSize);
+  private ResultList<? extends EntityInterface> read(String cursor) throws SourceException {
+    LOG.debug("[PaginatedEntitiesSource] Fetching a Batch of Size: {} ", batchSize);
     EntityRepository<?> entityRepository = Entity.getEntityRepository(entityType);
-    ResultList<? extends EntityInterface> result;
+    ResultList<? extends EntityInterface> result = null;
     try {
       result =
           entityRepository.listAfterWithSkipFailure(
@@ -70,20 +71,28 @@ public class PaginatedEntitiesReader implements Source<ResultList<? extends Enti
       if (result.getErrors().size() > 0) {
         result
             .getErrors()
-            .forEach((error) -> LOG.error("[EntitiesReader] Failed in getting Record, RECORD: {}", error.toString()));
+            .forEach(
+                (error) ->
+                    LOG.error("[PaginatedEntitiesSource] Failed in getting Record, RECORD: {}", error.toString()));
       }
 
       LOG.debug(
-          "[EntitiesReader] Batch Stats :- Submitted : {} Success: {} Failed: {}",
+          "[PaginatedEntitiesSource] Batch Stats :- Submitted : {} Success: {} Failed: {}",
           batchSize,
           result.getData().size(),
           result.getErrors().size());
       updateStats(result.getData().size(), result.getErrors().size());
 
     } catch (IOException e) {
-      LOG.debug("[EntitiesReader] Batch Stats :- Submitted : {} Success: {} Failed: {}", batchSize, 0, batchSize);
+      LOG.debug(
+          "[PaginatedEntitiesSource] Batch Stats :- Submitted : {} Success: {} Failed: {}", batchSize, 0, batchSize);
+      if (stats.getTotalRecords() - stats.getProcessedRecords() <= batchSize) {
+        isDone = true;
+        updateStats(0, stats.getTotalRecords() - stats.getProcessedRecords());
+      } else {
         updateStats(0, batchSize);
-      throw new ReaderException("[EntitiesReader] Batch encountered Exception. Failing Completely.", e);
+      }
+      throw new SourceException("[PaginatedEntitiesSource] Batch encountered Exception. Failing Completely.", e);
     }
 
     return result;
@@ -19,15 +19,16 @@ import org.elasticsearch.action.bulk.BulkResponse;
 import org.openmetadata.schema.system.StepStats;
 import org.openmetadata.service.Entity;
 import org.openmetadata.service.elasticsearch.ElasticSearchIndexDefinition;
+import org.openmetadata.service.jdbi3.CollectionDAO;
 import org.openmetadata.service.jdbi3.EntityRepository;
 
 public class ReindexingUtil {
   public static final String ENTITY_TYPE_KEY = "entityType";
 
   public static void getUpdatedStats(StepStats stats, int currentSuccess, int currentFailed) {
-    stats.setTotalRecords(stats.getTotalRecords() + currentSuccess + currentFailed);
-    stats.setTotalSuccessRecords(stats.getTotalSuccessRecords() + currentSuccess);
-    stats.setTotalFailedRecords(stats.getTotalFailedRecords() + currentFailed);
+    stats.setProcessedRecords(stats.getProcessedRecords() + currentSuccess + currentFailed);
+    stats.setSuccessRecords(stats.getSuccessRecords() + currentSuccess);
+    stats.setFailedRecords(stats.getFailedRecords() + currentFailed);
   }
 
   public static boolean isDataInsightIndex(String entityType) {
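Under the renamed accessors, totalRecords is no longer accumulated here; it is set once up front (for example from a DAO count, as the sources changed above now do), and getUpdatedStats only tracks what each batch actually handled. A small sketch of the intended bookkeeping, with illustrative numbers:

    StepStats stats = new StepStats().withTotalRecords(1000); // expected volume, set once
    ReindexingUtil.getUpdatedStats(stats, 90, 10);            // one batch: 90 succeeded, 10 failed
    // now: processedRecords == 100, successRecords == 90, failedRecords == 10,
    // while totalRecords stays 1000, so progress can be reported as processed/total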
@@ -36,11 +37,15 @@ public class ReindexingUtil {
         || entityType.equalsIgnoreCase(ElasticSearchIndexDefinition.WEB_ANALYTIC_USER_ACTIVITY_REPORT_DATA);
   }
 
-  public static int getTotalRequestToProcess(Set<String> entities) {
+  public static int getTotalRequestToProcess(Set<String> entities, CollectionDAO dao) {
     int total = 0;
     for (String entityType : entities) {
+      if (!isDataInsightIndex(entityType)) {
       EntityRepository<?> repository = Entity.getEntityRepository(entityType);
       total += repository.dao.listTotalCount();
+      } else {
+        total += dao.entityExtensionTimeSeriesDao().listCount(entityType);
+      }
     }
     return total;
   }
@@ -51,7 +56,6 @@ public class ReindexingUtil {
       if (!bulkItemResponse.isFailed()) {
         success++;
       }
-      ;
     }
     return success;
   }
@@ -18,6 +18,7 @@ import static org.openmetadata.service.util.ReIndexingHandler.REINDEXING_JOB_EXT
 import static org.openmetadata.service.workflows.searchIndex.ReindexingUtil.ENTITY_TYPE_KEY;
 import static org.openmetadata.service.workflows.searchIndex.ReindexingUtil.getSuccessFromBulkResponse;
 import static org.openmetadata.service.workflows.searchIndex.ReindexingUtil.getTotalRequestToProcess;
+import static org.openmetadata.service.workflows.searchIndex.ReindexingUtil.getUpdatedStats;
 import static org.openmetadata.service.workflows.searchIndex.ReindexingUtil.isDataInsightIndex;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
@@ -44,8 +45,8 @@ import org.openmetadata.schema.system.Stats;
 import org.openmetadata.schema.system.StepStats;
 import org.openmetadata.service.elasticsearch.ElasticSearchIndexDefinition;
 import org.openmetadata.service.exception.ProcessorException;
-import org.openmetadata.service.exception.ReaderException;
-import org.openmetadata.service.exception.WriterException;
+import org.openmetadata.service.exception.SinkException;
+import org.openmetadata.service.exception.SourceException;
 import org.openmetadata.service.jdbi3.CollectionDAO;
 import org.openmetadata.service.socket.WebSocketManager;
 import org.openmetadata.service.util.JsonUtils;
@@ -54,11 +55,11 @@ import org.openmetadata.service.util.ResultList;
 
 @Slf4j
 public class SearchIndexWorkflow implements Runnable {
-  private final List<PaginatedEntitiesReader> entitiesReaders = new ArrayList<>();
-  private final List<PaginatedDataInsightReader> dataInsightReaders = new ArrayList<>();
+  private final List<PaginatedEntitiesSource> paginatedEntitiesSources = new ArrayList<>();
+  private final List<PaginatedDataInsightSource> paginatedDataInsightSources = new ArrayList<>();
   private final EsEntitiesProcessor entitiesProcessor;
   private final EsDataInsightProcessor dataInsightProcessor;
-  private final EsSearchIndexWriter writer;
+  private final EsSearchIndexSink searchIndexSink;
   private final ElasticSearchIndexDefinition elasticSearchIndexDefinition;
   @Getter private final EventPublisherJob jobData;
   private final CollectionDAO dao;
@@ -78,143 +79,178 @@ public class SearchIndexWorkflow implements Runnable {
             List<String> fields =
                 new ArrayList<>(
                     Objects.requireNonNull(getIndexFields(entityType, jobData.getSearchIndexMappingLanguage())));
-            entitiesReaders.add(new PaginatedEntitiesReader(entityType, jobData.getBatchSize(), fields));
+            paginatedEntitiesSources.add(new PaginatedEntitiesSource(entityType, jobData.getBatchSize(), fields));
           } else {
-            dataInsightReaders.add(new PaginatedDataInsightReader(dao, entityType, jobData.getBatchSize()));
+            paginatedDataInsightSources.add(
+                new PaginatedDataInsightSource(dao, entityType, jobData.getBatchSize()));
           }
         });
     this.entitiesProcessor = new EsEntitiesProcessor();
     this.dataInsightProcessor = new EsDataInsightProcessor();
-    this.writer = new EsSearchIndexWriter(client);
+    this.searchIndexSink = new EsSearchIndexSink(client);
    this.elasticSearchIndexDefinition = elasticSearchIndexDefinition;
   }
 
   @SneakyThrows
   public void run() {
+    try {
       LOG.info("Executing Reindexing Job with JobData : {}", jobData);
 
       // Update Job Status
       jobData.setStatus(EventPublisherJob.Status.RUNNING);
 
       // Run ReIndexing
-      entitiesReIndexer();
-      dataInsightReindexer();
+      entitiesReIndex();
+      dataInsightReindex();
 
       // Mark Job as Completed
       updateJobStatus();
       jobData.setEndTime(System.currentTimeMillis());
+    } catch (Exception ex) {
+      String error =
+          String.format(
+              "Reindexing Job Has Encountered an Exception. \n Job Data: %s, \n Stack : %s ",
+              jobData.toString(), ExceptionUtils.getStackTrace(ex));
+      LOG.error(error);
+      jobData.setStatus(EventPublisherJob.Status.FAILED);
+      handleJobError("Failure in Job: Check Stack", error, System.currentTimeMillis());
+    } finally {
       // store job details in Database
       updateRecordToDb();
+      // Send update
+      sendUpdates();
+      // Remove list from active jobs
       ReIndexingHandler.getInstance().removeCompletedJob(jobData.getId());
     }
+  }
 
-  private void entitiesReIndexer() {
+  private void entitiesReIndex() {
     Map<String, Object> contextData = new HashMap<>();
-    for (PaginatedEntitiesReader reader : entitiesReaders) {
-      reCreateIndexes(reader.getEntityType());
-      contextData.put(ENTITY_TYPE_KEY, reader.getEntityType());
+    for (PaginatedEntitiesSource paginatedEntitiesSource : paginatedEntitiesSources) {
+      reCreateIndexes(paginatedEntitiesSource.getEntityType());
+      contextData.put(ENTITY_TYPE_KEY, paginatedEntitiesSource.getEntityType());
       ResultList<? extends EntityInterface> resultList;
-      while (!reader.isDone()) {
+      while (!paginatedEntitiesSource.isDone()) {
         long currentTime = System.currentTimeMillis();
         int requestToProcess = jobData.getBatchSize();
         int failed = requestToProcess;
         int success = 0;
         try {
-          resultList = reader.readNext(null);
+          resultList = paginatedEntitiesSource.readNext(null);
           requestToProcess = resultList.getData().size() + resultList.getErrors().size();
+          if (resultList.getData().size() > 0) {
             // process data to build Reindex Request
             BulkRequest requests = entitiesProcessor.process(resultList, contextData);
             // write the data to ElasticSearch
-            BulkResponse response = writer.write(requests, contextData);
+            BulkResponse response = searchIndexSink.write(requests, contextData);
             // update Status
             handleErrors(resultList, response, currentTime);
             // Update stats
            success = getSuccessFromBulkResponse(response);
            failed = requestToProcess - success;
-        } catch (ReaderException rx) {
-          handleReaderError(
+          } else {
+            failed = 0;
+          }
+        } catch (SourceException rx) {
+          handleSourceError(
               rx.getMessage(),
-              String.format("Cause: %s \n Stack: %s", rx.getCause(), ExceptionUtils.getStackTrace(rx)),
+              String.format(
+                  "EntityType: %s \n Cause: %s \n Stack: %s",
+                  paginatedEntitiesSource.getEntityType(), rx.getCause(), ExceptionUtils.getStackTrace(rx)),
               currentTime);
         } catch (ProcessorException px) {
           handleProcessorError(
               px.getMessage(),
-              String.format("Cause: %s \n Stack: %s", px.getCause(), ExceptionUtils.getStackTrace(px)),
+              String.format(
+                  "EntityType: %s \n Cause: %s \n Stack: %s",
+                  paginatedEntitiesSource.getEntityType(), px.getCause(), ExceptionUtils.getStackTrace(px)),
              currentTime);
-        } catch (WriterException wx) {
-          handleEsError(
+        } catch (SinkException wx) {
+          handleEsSinkError(
              wx.getMessage(),
-              String.format("Cause: %s \n Stack: %s", wx.getCause(), ExceptionUtils.getStackTrace(wx)),
+              String.format(
+                  "EntityType: %s \n Cause: %s \n Stack: %s",
+                  paginatedEntitiesSource.getEntityType(), wx.getCause(), ExceptionUtils.getStackTrace(wx)),
              currentTime);
         } finally {
-          updateStats(success, failed, reader.getStats(), entitiesProcessor.getStats(), writer.getStats());
-          try {
-            WebSocketManager.getInstance()
-                .sendToOne(
-                    jobData.getStartedBy(),
-                    WebSocketManager.JOB_STATUS_BROADCAST_CHANNEL,
-                    JsonUtils.pojoToJson(jobData));
-          } catch (JsonProcessingException ex) {
-            LOG.error("Failed to send updated stats with WebSocker", ex);
-          }
+          updateStats(
+              success,
+              failed,
+              paginatedEntitiesSource.getStats(),
+              entitiesProcessor.getStats(),
+              searchIndexSink.getStats());
+          sendUpdates();
         }
       }
     }
   }
 
-  private void dataInsightReindexer() {
+  private void dataInsightReindex() {
     Map<String, Object> contextData = new HashMap<>();
-    for (PaginatedDataInsightReader dataInsightReader : dataInsightReaders) {
-      reCreateIndexes(dataInsightReader.getEntityType());
-      contextData.put(ENTITY_TYPE_KEY, dataInsightReader.getEntityType());
+    for (PaginatedDataInsightSource paginatedDataInsightSource : paginatedDataInsightSources) {
+      reCreateIndexes(paginatedDataInsightSource.getEntityType());
+      contextData.put(ENTITY_TYPE_KEY, paginatedDataInsightSource.getEntityType());
       ResultList<ReportData> resultList;
-      while (!dataInsightReader.isDone()) {
+      while (!paginatedDataInsightSource.isDone()) {
        long currentTime = System.currentTimeMillis();
        int requestToProcess = jobData.getBatchSize();
        int failed = requestToProcess;
        int success = 0;
        try {
-          resultList = dataInsightReader.readNext(null);
+          resultList = paginatedDataInsightSource.readNext(null);
          requestToProcess = resultList.getData().size() + resultList.getErrors().size();
+          if (resultList.getData().size() > 0) {
            // process data to build Reindex Request
            BulkRequest requests = dataInsightProcessor.process(resultList, contextData);
            // write the data to ElasticSearch
-            BulkResponse response = writer.write(requests, contextData);
+            // write the data to ElasticSearch
+            BulkResponse response = searchIndexSink.write(requests, contextData);
            // update Status
            handleErrors(resultList, response, currentTime);
            // Update stats
            success = getSuccessFromBulkResponse(response);
            failed = requestToProcess - success;
-        } catch (ReaderException rx) {
-          handleReaderError(
+          } else {
+            failed = 0;
+          }
+        } catch (SourceException rx) {
+          handleSourceError(
              rx.getMessage(),
-              String.format("Cause: %s \n Stack: %s", rx.getCause(), ExceptionUtils.getStackTrace(rx)),
+              String.format(
+                  "EntityType: %s \n Cause: %s \n Stack: %s",
+                  paginatedDataInsightSource.getEntityType(), rx.getCause(), ExceptionUtils.getStackTrace(rx)),
              currentTime);
        } catch (ProcessorException px) {
          handleProcessorError(
              px.getMessage(),
-              String.format("Cause: %s \n Stack: %s", px.getCause(), ExceptionUtils.getStackTrace(px)),
+              String.format(
+                  "EntityType: %s \n Cause: %s \n Stack: %s",
+                  paginatedDataInsightSource.getEntityType(), px.getCause(), ExceptionUtils.getStackTrace(px)),
              currentTime);
-        } catch (WriterException wx) {
-          handleEsError(
+        } catch (SinkException wx) {
+          handleEsSinkError(
              wx.getMessage(),
-              String.format("Cause: %s \n Stack: %s", wx.getCause(), ExceptionUtils.getStackTrace(wx)),
+              String.format(
+                  "EntityType: %s \n Cause: %s \n Stack: %s",
+                  paginatedDataInsightSource.getEntityType(), wx.getCause(), ExceptionUtils.getStackTrace(wx)),
              currentTime);
        } finally {
          updateStats(
-              success, failed, dataInsightReader.getStats(), dataInsightProcessor.getStats(), writer.getStats());
+              success,
+              failed,
+              paginatedDataInsightSource.getStats(),
+              dataInsightProcessor.getStats(),
+              searchIndexSink.getStats());
+          sendUpdates();
+        }
+      }
+    }
+  }
 
+  private void sendUpdates() {
    try {
      WebSocketManager.getInstance()
          .sendToOne(
-              jobData.getStartedBy(),
-              WebSocketManager.JOB_STATUS_BROADCAST_CHANNEL,
-              JsonUtils.pojoToJson(jobData));
+              jobData.getStartedBy(), WebSocketManager.JOB_STATUS_BROADCAST_CHANNEL, JsonUtils.pojoToJson(jobData));
    } catch (JsonProcessingException ex) {
-      LOG.error("Failed to send updated stats with WebSocker", ex);
-    }
-  }
-}
+      LOG.error("Failed to send updated stats with WebSocket", ex);
    }
  }
 
@@ -226,21 +262,19 @@ public class SearchIndexWorkflow implements Runnable {
     // Total Stats
     StepStats stats = jobData.getStats().getJobStats();
     if (stats == null) {
-      stats = new StepStats().withTotalRecords(getTotalRequestToProcess(jobData.getEntities()));
+      stats = new StepStats().withTotalRecords(getTotalRequestToProcess(jobData.getEntities(), dao));
     }
-    stats.setTotalSuccessRecords(stats.getTotalSuccessRecords() + currentSuccess);
-    stats.setTotalFailedRecords(stats.getTotalFailedRecords() + currentFailed);
+    getUpdatedStats(stats, currentSuccess, currentFailed);
 
+    // Update for the Job
     jobDataStats.setJobStats(stats);
 
     // Reader Stats
     jobDataStats.setSourceStats(reader);
 
     // Processor
     jobDataStats.setProcessorStats(processor);
 
     // Writer
     jobDataStats.setSinkStats(writer);
 
     jobData.setStats(jobDataStats);
   }
 
@@ -268,11 +302,11 @@ public class SearchIndexWorkflow implements Runnable {
   }
 
   private void handleErrors(ResultList<?> data, BulkResponse response, long time) {
-    handleReaderError(data, time);
-    handleEsErrors(response, time);
+    handleSourceError(data, time);
+    handleEsSinkErrors(response, time);
   }
 
-  private void handleReaderError(String context, String reason, long time) {
+  private void handleSourceError(String context, String reason, long time) {
     Failure failures = getFailure();
     FailureDetails readerFailures = getFailureDetails(context, reason, time);
     failures.setSourceError(readerFailures);
@@ -286,18 +320,25 @@ public class SearchIndexWorkflow implements Runnable {
     jobData.setFailure(failures);
   }
 
-  private void handleEsError(String context, String reason, long time) {
+  private void handleEsSinkError(String context, String reason, long time) {
     Failure failures = getFailure();
     FailureDetails writerFailure = getFailureDetails(context, reason, time);
-    failures.setProcessorError(writerFailure);
+    failures.setSinkError(writerFailure);
+    jobData.setFailure(failures);
+  }
+
+  private void handleJobError(String context, String reason, long time) {
+    Failure failures = getFailure();
+    FailureDetails jobFailure = getFailureDetails(context, reason, time);
+    failures.setJobError(jobFailure);
     jobData.setFailure(failures);
   }
 
   @SneakyThrows
-  private void handleReaderError(ResultList<?> data, long time) {
+  private void handleSourceError(ResultList<?> data, long time) {
     if (data.getErrors().size() > 0) {
-      handleReaderError(
-          "ReaderContext: Encountered Error While Reading Data",
+      handleSourceError(
+          "SourceContext: Encountered Error While Reading Data",
           String.format(
               "Following Entities were not fetched Successfully : %s", JsonUtils.pojoToJson(data.getErrors())),
           time);
@@ -305,7 +346,7 @@ public class SearchIndexWorkflow implements Runnable {
   }
 
   @SneakyThrows
-  private void handleEsErrors(BulkResponse response, long time) {
+  private void handleEsSinkErrors(BulkResponse response, long time) {
     List<FailureDetails> details = new ArrayList<>();
     for (BulkItemResponse bulkItemResponse : response) {
       if (bulkItemResponse.isFailed()) {
@@ -325,7 +366,7 @@ public class SearchIndexWorkflow implements Runnable {
       }
     }
     if (details.size() > 0) {
-      handleEsError(
+      handleEsSinkError(
           "[EsWriter] BulkResponseItems",
           String.format("[BulkItemResponse] Got Following Error Responses: \n %s ", JsonUtils.pojoToJson(details)),
           time);
|
@ -34,12 +34,17 @@
|
|||||||
"type": "integer",
|
"type": "integer",
|
||||||
"default": 0
|
"default": 0
|
||||||
},
|
},
|
||||||
"totalSuccessRecords": {
|
"processedRecords": {
|
||||||
|
"description": "Records that are processed in",
|
||||||
|
"type": "integer",
|
||||||
|
"default": 0
|
||||||
|
},
|
||||||
|
"successRecords": {
|
||||||
"description": "Count of Total Successfully Records",
|
"description": "Count of Total Successfully Records",
|
||||||
"type": "integer",
|
"type": "integer",
|
||||||
"default": 0
|
"default": 0
|
||||||
},
|
},
|
||||||
"totalFailedRecords": {
|
"failedRecords": {
|
||||||
"description": "Count of Total Failed Records",
|
"description": "Count of Total Failed Records",
|
||||||
"type": "integer",
|
"type": "integer",
|
||||||
"default": 0
|
"default": 0
|
||||||
@@ -140,6 +145,11 @@
       "type": "object",
       "$ref": "#/definitions/failureDetails",
       "default": null
+    },
+    "jobError" : {
+      "type": "object",
+      "$ref": "#/definitions/failureDetails",
+      "default": null
     }
   },
   "additionalProperties": false