Mirror of https://github.com/datahub-project/datahub.git, synced 2025-08-15 04:37:03 +00:00
fix(ingest): spark-lineage - Adding additional debug logs to spark lineage (#5772)
commit caec2ed235 (parent c44fd626d8)
```diff
@@ -44,10 +44,13 @@ import datahub.spark.model.SQLQueryExecEndEvent;
 import datahub.spark.model.SQLQueryExecStartEvent;
 import datahub.spark.model.dataset.SparkDataset;
 import lombok.extern.slf4j.Slf4j;
+import org.apache.spark.util.JsonProtocol;
+import org.json4s.jackson.JsonMethods$;
 import scala.collection.JavaConversions;
 import scala.runtime.AbstractFunction1;
 import scala.runtime.AbstractPartialFunction;
 
+
 @Slf4j
 public class DatahubSparkListener extends SparkListener {
 
```
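The two imports added above, org.apache.spark.util.JsonProtocol and org.json4s.jackson.JsonMethods$, back the JSON debug rendering introduced in the next hunk.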
```diff
@@ -78,10 +81,31 @@ public class DatahubSparkListener extends SparkListener {
       this.sqlStart = sqlStart;
       this.plan = plan;
       this.ctx = ctx;
+
+      String jsonPlan = (plan != null) ? plan.toJSON() : null;
+      String sqlStartJson =
+          (sqlStart != null) ? JsonMethods$.MODULE$.compact(JsonProtocol.sparkEventToJson(sqlStart)) : null;
+      log.debug("SqlStartTask with parameters: sqlStart: {}, plan: {}, ctx: {}", sqlStartJson, jsonPlan, ctx);
     }
 
     public void run() {
-      appSqlDetails.get(ctx.applicationId()).put(sqlStart.executionId(),
+      if (ctx == null) {
+        log.error("Context is null skipping run");
+        return;
+      }
+
+      if (ctx.conf() == null) {
+        log.error("Context does not have config. Skipping run");
+        return;
+      }
+
+      if (sqlStart == null) {
+        log.error("sqlStart is null skipping run");
+        return;
+      }
+
+      appSqlDetails.get(ctx.applicationId())
+          .put(sqlStart.executionId(),
           new SQLQueryExecStartEvent(ctx.conf().get("spark.master"), getPipelineName(ctx), ctx.applicationId(),
               sqlStart.time(), sqlStart.executionId(), null));
       log.debug("PLAN for execution id: " + getPipelineName(ctx) + ":" + sqlStart.executionId() + "\n");
```
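This hunk carries the substance of the commit: the constructor now logs the SQL-execution start event and the logical plan as JSON, and run() returns early with an error log instead of hitting a NullPointerException when the context, its config, or the start event is missing. A minimal standalone sketch of the serialization call the debug line relies on; the class and method names here are illustrative, not part of the commit:

```java
import org.apache.spark.scheduler.SparkListenerEvent;
import org.apache.spark.util.JsonProtocol;
import org.json4s.jackson.JsonMethods$;

// Illustrative helper, not part of the commit: renders any Spark listener
// event as a compact JSON string, mirroring the calls in the new debug log.
public final class EventJsonSketch {
  static String toCompactJson(SparkListenerEvent event) {
    // JsonProtocol.sparkEventToJson (the Spark 2.x/3.0-era API used here)
    // returns a json4s JValue; JsonMethods$.MODULE$.compact renders it.
    return (event == null) ? null
        : JsonMethods$.MODULE$.compact(JsonProtocol.sparkEventToJson(event));
  }
}
```

These debug lines stay silent unless the datahub.spark logger runs at DEBUG, e.g. log4j.logger.datahub.spark=DEBUG in log4j.properties, assuming Spark's stock log4j 1.x backend.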
```diff
@@ -94,8 +118,8 @@ public class DatahubSparkListener extends SparkListener {
         return;
       }
       // Here assumption is that there will be only single target for single sql query
-      DatasetLineage lineage = new DatasetLineage(sqlStart.description(), plan.toString(),
-          outputDS.get().iterator().next());
+      DatasetLineage lineage =
+          new DatasetLineage(sqlStart.description(), plan.toString(), outputDS.get().iterator().next());
       Collection<QueryPlan<?>> allInners = new ArrayList<>();
 
       plan.collect(new AbstractPartialFunction<LogicalPlan, Void>() {
```
```diff
@@ -140,8 +164,9 @@ public class DatahubSparkListener extends SparkListener {
       });
     }
 
-    SQLQueryExecStartEvent evt = new SQLQueryExecStartEvent(ctx.conf().get("spark.master"), getPipelineName(ctx),
-        ctx.applicationId(), sqlStart.time(), sqlStart.executionId(), lineage);
+    SQLQueryExecStartEvent evt =
+        new SQLQueryExecStartEvent(ctx.conf().get("spark.master"), getPipelineName(ctx), ctx.applicationId(),
+            sqlStart.time(), sqlStart.executionId(), lineage);
 
     appSqlDetails.get(ctx.applicationId()).put(sqlStart.executionId(), evt);
 
```
```diff
@@ -257,11 +282,13 @@ public class DatahubSparkListener extends SparkListener {
       public Void apply(SparkContext sc) {
         SQLQueryExecStartEvent start = appSqlDetails.get(sc.applicationId()).remove(sqlEnd.executionId());
         if (start == null) {
-          log.error("Execution end event received, but start event missing for appId/sql exec Id " + sc.applicationId()
-              + ":" + sqlEnd.executionId());
+          log.error(
+              "Execution end event received, but start event missing for appId/sql exec Id " + sc.applicationId() + ":"
+                  + sqlEnd.executionId());
         } else if (start.getDatasetLineage() != null) {
-          SQLQueryExecEndEvent evt = new SQLQueryExecEndEvent(LineageUtils.getMaster(sc), sc.appName(),
-              sc.applicationId(), sqlEnd.time(), sqlEnd.executionId(), start);
+          SQLQueryExecEndEvent evt =
+              new SQLQueryExecEndEvent(LineageUtils.getMaster(sc), sc.appName(), sc.applicationId(), sqlEnd.time(),
+                  sqlEnd.executionId(), start);
           McpEmitter emitter = appEmitters.get(sc.applicationId());
           if (emitter != null) {
             emitter.accept(evt);
```
```diff
@@ -281,14 +308,13 @@ public class DatahubSparkListener extends SparkListener {
     appConfig.put(appId, datahubConf);
     Config pipelineConfig = datahubConf.hasPath(PIPELINE_KEY) ? datahubConf.getConfig(PIPELINE_KEY)
         : com.typesafe.config.ConfigFactory.empty();
-    AppStartEvent evt = new AppStartEvent(LineageUtils.getMaster(ctx), getPipelineName(ctx), appId, ctx.startTime(),
-        ctx.sparkUser(), pipelineConfig);
+    AppStartEvent evt =
+        new AppStartEvent(LineageUtils.getMaster(ctx), getPipelineName(ctx), appId, ctx.startTime(), ctx.sparkUser(),
+            pipelineConfig);
 
     appEmitters.computeIfAbsent(appId,
-        s -> datahubConf.hasPath(COALESCE_KEY) && datahubConf.getBoolean(COALESCE_KEY)
-            ? new CoalesceJobsEmitter(datahubConf)
-            : new McpEmitter(datahubConf))
-        .accept(evt);
+        s -> datahubConf.hasPath(COALESCE_KEY) && datahubConf.getBoolean(COALESCE_KEY) ? new CoalesceJobsEmitter(
+            datahubConf) : new McpEmitter(datahubConf)).accept(evt);
     consumers().forEach(c -> c.accept(evt));
     appDetails.put(appId, evt);
     appSqlDetails.put(appId, new ConcurrentHashMap<>());
```
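The reflowed computeIfAbsent call picks an emitter lazily per application: CoalesceJobsEmitter when the coalesce flag is set in the job config, McpEmitter otherwise. A hedged sketch of the flag check with Typesafe Config; the literal key "coalesce_jobs" is an assumption, the real spelling lives in the COALESCE_KEY constant:

```java
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

// Sketch only: how the hasPath/getBoolean guard behaves. "coalesce_jobs" is
// an assumed spelling of COALESCE_KEY, used here purely for illustration.
public final class CoalesceFlagSketch {
  public static void main(String[] args) {
    Config datahubConf = ConfigFactory.parseString("coalesce_jobs = true");
    // hasPath short-circuits the lookup so a missing key falls back to false
    // instead of getBoolean throwing ConfigException.Missing.
    boolean coalesce = datahubConf.hasPath("coalesce_jobs") && datahubConf.getBoolean("coalesce_jobs");
    System.out.println(coalesce ? "CoalesceJobsEmitter" : "McpEmitter");
  }
}
```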
```diff
@@ -329,10 +355,11 @@ public class DatahubSparkListener extends SparkListener {
     if (conf.contains(CONSUMER_TYPE_KEY)) {
       String consumerTypes = conf.get(CONSUMER_TYPE_KEY);
       return StreamSupport.stream(Splitter.on(",").trimResults().split(consumerTypes).spliterator(), false)
-          .map(x -> LineageUtils.getConsumer(x)).filter(Objects::nonNull).collect(Collectors.toList());
+          .map(x -> LineageUtils.getConsumer(x))
+          .filter(Objects::nonNull)
+          .collect(Collectors.toList());
     } else {
       return Collections.emptyList();
     }
-
   }
 }
```
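The consumer lookup is only reformatted into one chained call per line, but the idiom is worth seeing in isolation: Guava's Splitter yields an Iterable, StreamSupport bridges it into a Stream, and nulls from unknown consumer names are dropped. A self-contained sketch with a stand-in mapping in place of LineageUtils.getConsumer; the consumer names are made up:

```java
import com.google.common.base.Splitter;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

// Standalone sketch of the splitting idiom. "mcpEmitter"/"unknown" are
// invented names, and the null filter mimics LineageUtils.getConsumer
// returning null for an unregistered consumer type.
public final class ConsumerSplitSketch {
  public static void main(String[] args) {
    String consumerTypes = "mcpEmitter, unknown";
    List<String> consumers =
        StreamSupport.stream(Splitter.on(",").trimResults().split(consumerTypes).spliterator(), false)
            .map(name -> "unknown".equals(name) ? null : name) // stand-in lookup
            .filter(Objects::nonNull)
            .collect(Collectors.toList());
    System.out.println(consumers); // prints [mcpEmitter]
  }
}
```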