mirror of https://github.com/datahub-project/datahub.git (synced 2025-12-01 21:27:15 +00:00)

fix(patch): update json patch library (#10449)

This commit is contained in:
parent 232b98968c
commit 21e3f5416c
@@ -168,7 +168,8 @@ project.ext.externalDependency = [
'jettison': 'org.codehaus.jettison:jettison:1.5.4',
'jgrapht': 'org.jgrapht:jgrapht-core:1.5.1',
'jna': 'net.java.dev.jna:jna:5.12.1',
'jsonPatch': 'com.github.java-json-tools:json-patch:1.13',
'jsonPatch': 'jakarta.json:jakarta.json-api:2.1.3',
'jsonPathImpl': 'org.eclipse.parsson:parsson:1.1.6',
'jsonSimple': 'com.googlecode.json-simple:json-simple:1.1.1',
'jsonSmart': 'net.minidev:json-smart:2.4.9',
'json': 'org.json:json:20231013',

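The dependency swap above replaces com.github.java-json-tools:json-patch with the Jakarta JSON Processing API (jakarta.json-api), with Eclipse Parsson as the runtime implementation. For context, a minimal, self-contained sketch of the RFC 6902 patch API being migrated to; the class name and sample JSON below are illustrative, not part of the commit:

import jakarta.json.Json;
import jakarta.json.JsonObject;
import jakarta.json.JsonPatch;
import java.io.StringReader;

// Hypothetical demo class, not part of the commit.
public class JakartaJsonPatchDemo {
  public static void main(String[] args) {
    // Target document to patch.
    JsonObject target =
        Json.createReader(new StringReader("{\"name\":\"old\",\"tags\":[]}")).readObject();

    // Build an RFC 6902 patch with the JSON-P builder API.
    JsonPatch patch =
        Json.createPatchBuilder()
            .replace("/name", "new")
            .add("/tags/-", "example")
            .build();

    // Apply it; Parsson supplies the jakarta.json implementation on the classpath.
    JsonObject patched = patch.apply(target);
    System.out.println(patched); // {"name":"new","tags":["example"]}
  }
}
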
@@ -15,7 +15,10 @@ dependencies {
implementation externalDependency.jacksonDataBind
implementation externalDependency.jacksonDataFormatYaml
implementation externalDependency.reflections
api externalDependency.jsonPatch

implementation externalDependency.jsonPatch
implementation externalDependency.jsonPathImpl

constraints {
implementation(externalDependency.snakeYaml) {
because("previous versions are vulnerable to CVE-2022-25857")

@@ -28,6 +31,7 @@ dependencies {

testImplementation project(':test-models')
testImplementation project(path: ':test-models', configuration: 'testDataTemplate')
testImplementation project(':metadata-utils')
testImplementation externalDependency.testng
testImplementation externalDependency.mockito
testImplementation externalDependency.mockitoInline

@@ -1,8 +1,8 @@
package com.linkedin.metadata.aspect.batch;

import com.github.fge.jsonpatch.Patch;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.metadata.aspect.AspectRetriever;
import jakarta.json.JsonPatch;

/**
* A change proposal represented as a patch to an exiting stored object in the primary data store.

@@ -17,5 +17,5 @@ public interface PatchMCP extends MCPItem {
*/
ChangeMCP applyPatch(RecordTemplate recordTemplate, AspectRetriever aspectRetriever);

Patch getPatch();
JsonPatch getPatch();
}

@@ -1,12 +1,15 @@
package com.linkedin.metadata.aspect.patch;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.JsonNode;
import com.github.fge.jsonpatch.JsonPatch;
import java.io.IOException;
import com.linkedin.util.Pair;
import jakarta.json.Json;
import jakarta.json.JsonArrayBuilder;
import jakarta.json.JsonPatch;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.AllArgsConstructor;

@@ -21,7 +24,7 @@ import lombok.NoArgsConstructor;
public class GenericJsonPatch {
@Nullable private Map<String, List<String>> arrayPrimaryKeys;

@Nonnull private JsonNode patch;
@Nonnull private List<PatchOp> patch;

@Nonnull
public Map<String, List<String>> getArrayPrimaryKeys() {

@@ -29,7 +32,27 @@ public class GenericJsonPatch {
}

@JsonIgnore
public JsonPatch getJsonPatch() throws IOException {
return JsonPatch.fromJson(patch);
public JsonPatch getJsonPatch() {
JsonArrayBuilder arrayBuilder = Json.createArrayBuilder();
patch.forEach(op -> arrayBuilder.add(Json.createObjectBuilder(op.toMap())));
return Json.createPatch(arrayBuilder.build());
}

@Data
@NoArgsConstructor
public static class PatchOp {
@Nonnull private String op;
@Nonnull private String path;
@Nullable private Object value;

public Map<String, ?> toMap() {
if (value != null) {
return Stream.of(Pair.of("op", op), Pair.of("path", path), Pair.of("value", value))
.collect(Collectors.toMap(Pair::getKey, Pair::getValue));
} else {
return Stream.of(Pair.of("op", op), Pair.of("path", path))
.collect(Collectors.toMap(Pair::getKey, Pair::getValue));
}
}
}
}

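GenericJsonPatch now materializes its list of PatchOp entries into a jakarta.json.JsonPatch through Json.createArrayBuilder and Json.createPatch instead of Jackson's JsonPatch.fromJson. A small sketch of that conversion path in isolation; the class name and sample operations below are illustrative, not from the commit:

import jakarta.json.Json;
import jakarta.json.JsonArray;
import jakarta.json.JsonPatch;

// Hypothetical example class, not part of the commit.
public class CreatePatchFromOps {
  public static void main(String[] args) {
    // Each operation is a JSON object with "op", "path" and (optionally) "value".
    JsonArray ops =
        Json.createArrayBuilder()
            .add(Json.createObjectBuilder().add("op", "add").add("path", "/a").add("value", 1))
            .add(Json.createObjectBuilder().add("op", "remove").add("path", "/b"))
            .build();

    // Wrap the array of operations as an RFC 6902 patch.
    JsonPatch patch = Json.createPatch(ops);
    System.out.println(patch.toJsonArray());
  }
}
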
@@ -15,10 +15,9 @@ import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME;
import static com.linkedin.metadata.Constants.UPSTREAM_LINEAGE_ASPECT_NAME;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.github.fge.jsonpatch.JsonPatchException;
import com.github.fge.jsonpatch.Patch;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.metadata.models.AspectSpec;
import jakarta.json.JsonPatch;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

@@ -75,12 +74,11 @@ public class AspectTemplateEngine {
* @param aspectSpec aspectSpec of the template
* @return a {@link RecordTemplate} with the patch applied
* @throws JsonProcessingException if there is an issue with processing the record template's json
* @throws JsonPatchException if there is an issue with applying the json patch
*/
@Nonnull
public <T extends RecordTemplate> RecordTemplate applyPatch(
RecordTemplate recordTemplate, Patch jsonPatch, AspectSpec aspectSpec)
throws JsonProcessingException, JsonPatchException {
RecordTemplate recordTemplate, JsonPatch jsonPatch, AspectSpec aspectSpec)
throws JsonProcessingException {
Template<T> template = getTemplate(aspectSpec);
return template.applyPatch(recordTemplate, jsonPatch);
}

@@ -1,23 +1,29 @@
package com.linkedin.metadata.aspect.patch.template;

import static com.linkedin.metadata.aspect.patch.template.TemplateUtil.OBJECT_MAPPER;
import static com.linkedin.metadata.aspect.patch.template.TemplateUtil.populateTopLevelKeys;

import com.datahub.util.RecordUtils;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.github.fge.jsonpatch.JsonPatchException;
import com.github.fge.jsonpatch.Patch;
import com.linkedin.data.template.RecordTemplate;
import jakarta.json.Json;
import jakarta.json.JsonObject;
import jakarta.json.JsonPatch;
import java.io.StringReader;

public abstract class CompoundKeyTemplate<T extends RecordTemplate>
implements ArrayMergingTemplate<T> {

@Override
public T applyPatch(RecordTemplate recordTemplate, Patch jsonPatch)
throws JsonProcessingException, JsonPatchException {
public T applyPatch(RecordTemplate recordTemplate, JsonPatch jsonPatch)
throws JsonProcessingException {
JsonNode transformed = populateTopLevelKeys(preprocessTemplate(recordTemplate), jsonPatch);
JsonNode patched = jsonPatch.apply(transformed);
JsonNode postProcessed = rebaseFields(patched);
JsonObject patched =
jsonPatch.apply(
Json.createReader(new StringReader(OBJECT_MAPPER.writeValueAsString(transformed)))
.readObject());
JsonNode postProcessed = rebaseFields(OBJECT_MAPPER.readTree(patched.toString()));
return RecordUtils.toRecordTemplate(getTemplateType(), postProcessed.toString());
}
}

@@ -6,9 +6,11 @@ import static com.linkedin.metadata.aspect.patch.template.TemplateUtil.populateT
import com.datahub.util.RecordUtils;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.github.fge.jsonpatch.JsonPatchException;
import com.github.fge.jsonpatch.Patch;
import com.linkedin.data.template.RecordTemplate;
import jakarta.json.Json;
import jakarta.json.JsonObject;
import jakarta.json.JsonPatch;
import java.io.StringReader;
import javax.annotation.Nonnull;

public interface Template<T extends RecordTemplate> {

@@ -45,18 +47,21 @@ public interface Template<T extends RecordTemplate> {
* @param jsonPatch patch to apply
* @return patched value
* @throws JsonProcessingException if there is an issue converting the input to JSON
* @throws JsonPatchException if there is an issue applying the patch
*/
default T applyPatch(RecordTemplate recordTemplate, Patch jsonPatch)
throws JsonProcessingException, JsonPatchException {

default T applyPatch(RecordTemplate recordTemplate, JsonPatch jsonPatch)
throws JsonProcessingException {
TemplateUtil.validatePatch(jsonPatch);

JsonNode transformed = populateTopLevelKeys(preprocessTemplate(recordTemplate), jsonPatch);
try {
JsonNode patched = jsonPatch.apply(transformed);
JsonNode postProcessed = rebaseFields(patched);
// Hack in a more efficient patcher. Even with the serialization overhead 140% faster
JsonObject patched =
jsonPatch.apply(
Json.createReader(new StringReader(OBJECT_MAPPER.writeValueAsString(transformed)))
.readObject());
JsonNode postProcessed = rebaseFields(OBJECT_MAPPER.readTree(patched.toString()));
return RecordUtils.toRecordTemplate(getTemplateType(), postProcessed.toString());
} catch (JsonPatchException e) {
} catch (JsonProcessingException e) {
throw new RuntimeException(
String.format(
"Error performing JSON PATCH on aspect %s. Patch: %s Target: %s",

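The reworked applyPatch in both CompoundKeyTemplate and Template keeps Jackson for pre- and post-processing but hands the actual patch application to jakarta.json, bridging the two by serializing the Jackson tree to a String and reading it back (the "140% faster" comment above refers to this path). Note that jakarta.json.JsonPatch.apply throws the unchecked jakarta.json.JsonException rather than a checked JsonPatchException, which is why the checked exception disappears from the signatures. A stripped-down sketch of the round-trip; the helper class and method are illustrative, not the commit's code:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import jakarta.json.Json;
import jakarta.json.JsonObject;
import jakarta.json.JsonPatch;
import java.io.StringReader;

// Hypothetical helper, not part of the commit.
public class JacksonJakartaBridge {
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  // Serialize the Jackson tree, apply the jakarta.json patch, parse the result back.
  static JsonNode applyPatch(JsonNode source, JsonPatch patch) throws Exception {
    JsonObject target =
        Json.createReader(new StringReader(OBJECT_MAPPER.writeValueAsString(source))).readObject();
    JsonObject patched = patch.apply(target);
    return OBJECT_MAPPER.readTree(patched.toString());
  }
}
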
@@ -8,9 +8,10 @@ import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.github.fge.jsonpatch.Patch;
import com.linkedin.metadata.aspect.patch.PatchOperationType;
import com.linkedin.util.Pair;
import jakarta.json.JsonPatch;
import jakarta.json.JsonValue;
import java.util.ArrayList;
import java.util.List;

@@ -30,34 +31,32 @@ public class TemplateUtil {
.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
}

public static List<Pair<PatchOperationType, String>> getPaths(Patch jsonPatch) {
JsonNode patchNode = OBJECT_MAPPER.valueToTree(jsonPatch);
public static List<Pair<PatchOperationType, String>> getPaths(JsonPatch jsonPatch) {
List<Pair<PatchOperationType, String>> paths = new ArrayList<>();
patchNode
.elements()
.forEachRemaining(
jsonPatch.toJsonArray().stream()
.map(JsonValue::asJsonObject)
.forEach(
node ->
paths.add(
Pair.of(
PatchOperationType.valueOf(node.get("op").asText().toUpperCase()),
node.get("path").asText())));
PatchOperationType.valueOf(node.getString("op").toUpperCase()),
node.getString("path"))));
return paths;
}

public static void validatePatch(Patch jsonPatch) {
public static void validatePatch(JsonPatch jsonPatch) {
// ensure supported patch operations
JsonNode patchNode = OBJECT_MAPPER.valueToTree(jsonPatch);
patchNode
.elements()
.forEachRemaining(
node -> {
jsonPatch.toJsonArray().stream()
.map(JsonValue::asJsonObject)
.forEach(
jsonObject -> {
try {
PatchOperationType.valueOf(node.get("op").asText().toUpperCase());
PatchOperationType.valueOf(jsonObject.getString("op").toUpperCase());
} catch (Exception e) {
throw new RuntimeException(
String.format(
"Unsupported PATCH operation: `%s` Operation `%s`",
node.get("op").asText(), node),
jsonObject.getString("op"), jsonObject),
e);
}
});

@@ -70,7 +69,7 @@ public class TemplateUtil {
* @param transformedNode transformed node to have keys populated
* @return transformed node that has top level keys populated
*/
public static JsonNode populateTopLevelKeys(JsonNode transformedNode, Patch jsonPatch) {
public static JsonNode populateTopLevelKeys(JsonNode transformedNode, JsonPatch jsonPatch) {
JsonNode transformedNodeClone = transformedNode.deepCopy();
List<Pair<PatchOperationType, String>> paths = getPaths(jsonPatch);
for (Pair<PatchOperationType, String> operationPath : paths) {

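TemplateUtil no longer round-trips the patch through Jackson's valueToTree to inspect it; a jakarta.json.JsonPatch exposes its operations directly via toJsonArray(), each one a JsonObject with "op" and "path" members. A compact sketch of that inspection; the class name and paths are illustrative:

import jakarta.json.Json;
import jakarta.json.JsonPatch;
import jakarta.json.JsonValue;

// Hypothetical example class, not part of the commit.
public class PatchInspection {
  public static void main(String[] args) {
    JsonPatch patch = Json.createPatchBuilder().add("/x", 1).remove("/y").build();
    // Walk the operations without any Jackson involvement.
    patch.toJsonArray().stream()
        .map(JsonValue::asJsonObject)
        .forEach(op -> System.out.println(op.getString("op") + " " + op.getString("path")));
  }
}
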
@@ -1,7 +1,6 @@
package com.linkedin.metadata.aspect.patch.template.common;

import com.fasterxml.jackson.databind.JsonNode;
import com.github.fge.jsonpatch.JsonPatchException;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.metadata.aspect.patch.GenericJsonPatch;
import com.linkedin.metadata.aspect.patch.template.CompoundKeyTemplate;

@@ -53,7 +52,7 @@ public class GenericPatchTemplate<T extends RecordTemplate> extends CompoundKeyT
return transformedNode;
}

public T applyPatch(RecordTemplate recordTemplate) throws IOException, JsonPatchException {
public T applyPatch(RecordTemplate recordTemplate) throws IOException {
return super.applyPatch(recordTemplate, genericJsonPatch.getJsonPatch());
}
}

@@ -1,17 +1,11 @@
package com.linkedin.metadata.aspect.patch.template;

import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.github.fge.jackson.jsonpointer.JsonPointer;
import com.github.fge.jsonpatch.AddOperation;
import com.github.fge.jsonpatch.JsonPatch;
import com.github.fge.jsonpatch.JsonPatchOperation;
import com.linkedin.chart.ChartInfo;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.metadata.aspect.patch.template.chart.ChartInfoTemplate;
import java.util.ArrayList;
import java.util.List;
import jakarta.json.Json;
import jakarta.json.JsonObjectBuilder;
import jakarta.json.JsonPatchBuilder;
import org.testng.Assert;
import org.testng.annotations.Test;

@@ -21,18 +15,16 @@ public class ChartInfoTemplateTest {
public void testChartInfoTemplate() throws Exception {
ChartInfoTemplate chartInfoTemplate = new ChartInfoTemplate();
ChartInfo dashboardInfo = chartInfoTemplate.getDefault();
List<JsonPatchOperation> patchOperations = new ArrayList<>();
ObjectNode edgeNode = instance.objectNode();
edgeNode.put(
JsonPatchBuilder patchOperations = Json.createPatchBuilder();

JsonObjectBuilder edgeNode = Json.createObjectBuilder();
edgeNode.add(
"destinationUrn", "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)");
JsonPatchOperation operation =
new AddOperation(
new JsonPointer(
"/inputEdges/urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"),
edgeNode);
patchOperations.add(operation);
JsonPatch patch = new JsonPatch(patchOperations);
ChartInfo result = chartInfoTemplate.applyPatch(dashboardInfo, patch);

patchOperations.add(
"/inputEdges/urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)",
edgeNode.build());
ChartInfo result = chartInfoTemplate.applyPatch(dashboardInfo, patchOperations.build());

Assert.assertEquals(
UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"),

@@ -1,17 +1,10 @@
package com.linkedin.metadata.aspect.patch.template;

import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.github.fge.jackson.jsonpointer.JsonPointer;
import com.github.fge.jsonpatch.AddOperation;
import com.github.fge.jsonpatch.JsonPatch;
import com.github.fge.jsonpatch.JsonPatchOperation;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.dashboard.DashboardInfo;
import com.linkedin.metadata.aspect.patch.template.dashboard.DashboardInfoTemplate;
import java.util.ArrayList;
import java.util.List;
import jakarta.json.Json;
import jakarta.json.JsonPatchBuilder;
import org.testng.Assert;
import org.testng.annotations.Test;

@@ -21,18 +14,18 @@ public class DashboardInfoTemplateTest {
public void testDashboardInfoTemplate() throws Exception {
DashboardInfoTemplate dashboardInfoTemplate = new DashboardInfoTemplate();
DashboardInfo dashboardInfo = dashboardInfoTemplate.getDefault();
List<JsonPatchOperation> patchOperations = new ArrayList<>();
ObjectNode edgeNode = instance.objectNode();
edgeNode.put(
"destinationUrn", "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)");
JsonPatchOperation operation =
new AddOperation(
new JsonPointer(
"/datasetEdges/urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"),
edgeNode);
patchOperations.add(operation);
JsonPatch patch = new JsonPatch(patchOperations);
DashboardInfo result = dashboardInfoTemplate.applyPatch(dashboardInfo, patch);
JsonPatchBuilder jsonPatchBuilder = Json.createPatchBuilder();
jsonPatchBuilder.add(
"/datasetEdges/urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)",
Json.createObjectBuilder()
.add(
"destinationUrn",
Json.createValue(
"urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"))
.build());

DashboardInfo result =
dashboardInfoTemplate.applyPatch(dashboardInfo, jsonPatchBuilder.build());

Assert.assertEquals(
UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"),

@@ -1,47 +1,51 @@
package com.linkedin.metadata.aspect.patch.template;

import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;
import static com.linkedin.metadata.utils.GenericRecordUtils.JSON;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;

import com.fasterxml.jackson.databind.node.NumericNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.github.fge.jackson.jsonpointer.JsonPointer;
import com.github.fge.jsonpatch.AddOperation;
import com.github.fge.jsonpatch.JsonPatch;
import com.github.fge.jsonpatch.JsonPatchOperation;
import com.github.fge.jsonpatch.RemoveOperation;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.linkedin.common.UrnArray;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.data.ByteString;
import com.linkedin.data.DataMap;
import com.linkedin.dataset.FineGrainedLineage;
import com.linkedin.dataset.FineGrainedLineageDownstreamType;
import com.linkedin.dataset.FineGrainedLineageUpstreamType;
import com.linkedin.dataset.UpstreamLineage;
import com.linkedin.metadata.aspect.patch.template.dataset.UpstreamLineageTemplate;
import java.util.ArrayList;
import java.util.List;
import org.testng.Assert;
import com.linkedin.metadata.utils.GenericRecordUtils;
import jakarta.json.Json;
import jakarta.json.JsonObjectBuilder;
import jakarta.json.JsonPatch;
import jakarta.json.JsonPatchBuilder;
import jakarta.json.JsonValue;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.testng.annotations.Test;

public class UpstreamLineageTemplateTest {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

@Test
public void testPatchUpstream() throws Exception {
UpstreamLineageTemplate upstreamLineageTemplate = new UpstreamLineageTemplate();
UpstreamLineage upstreamLineage = upstreamLineageTemplate.getDefault();
List<JsonPatchOperation> patchOperations = new ArrayList<>();
ObjectNode fineGrainedLineageNode = instance.objectNode();
NumericNode upstreamConfidenceScore = instance.numberNode(1.0f);
fineGrainedLineageNode.set("confidenceScore", upstreamConfidenceScore);
JsonPatchOperation operation =
new AddOperation(
new JsonPointer(
"/fineGrainedLineages/CREATE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)//urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c1)"),
fineGrainedLineageNode);
patchOperations.add(operation);
JsonPatch jsonPatch = new JsonPatch(patchOperations);
JsonPatchBuilder jsonPatchBuilder = Json.createPatchBuilder();

JsonObjectBuilder fineGrainedLineageNode = Json.createObjectBuilder();
JsonValue upstreamConfidenceScore = Json.createValue(1.0f);
fineGrainedLineageNode.add("confidenceScore", upstreamConfidenceScore);

jsonPatchBuilder.add(
"/fineGrainedLineages/CREATE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)//urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c1)",
fineGrainedLineageNode.build());

// Initial population test
UpstreamLineage result = upstreamLineageTemplate.applyPatch(upstreamLineage, jsonPatch);
UpstreamLineage result =
upstreamLineageTemplate.applyPatch(upstreamLineage, jsonPatchBuilder.build());
// Hack because Jackson parses values to doubles instead of floats
DataMap dataMap = new DataMap();
dataMap.put("confidenceScore", 1.0);

@@ -61,36 +65,35 @@ public class UpstreamLineageTemplateTest {
fineGrainedLineage.setTransformOperation("CREATE");
fineGrainedLineage.setUpstreamType(FineGrainedLineageUpstreamType.FIELD_SET);
fineGrainedLineage.setDownstreamType(FineGrainedLineageDownstreamType.FIELD);
Assert.assertEquals(result.getFineGrainedLineages().get(0), fineGrainedLineage);
assertEquals(result.getFineGrainedLineages().get(0), fineGrainedLineage);

// Test non-overwrite upstreams and correct confidence score and types w/ overwrite
ObjectNode finegrainedLineageNode2 = instance.objectNode();
finegrainedLineageNode2.set(
"upstreamType", instance.textNode(FineGrainedLineageUpstreamType.FIELD_SET.name()));
finegrainedLineageNode2.set("confidenceScore", upstreamConfidenceScore);
finegrainedLineageNode2.set(
"downstreamType", instance.textNode(FineGrainedLineageDownstreamType.FIELD.name()));
JsonPatchOperation operation2 =
new AddOperation(
new JsonPointer(
"/fineGrainedLineages/CREATE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c2)/urn:li:query:someQuery/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"),
finegrainedLineageNode2);
NumericNode upstreamConfidenceScore2 = instance.numberNode(0.1f);
ObjectNode finegrainedLineageNode3 = instance.objectNode();
finegrainedLineageNode3.set(
"upstreamType", instance.textNode(FineGrainedLineageUpstreamType.DATASET.name()));
finegrainedLineageNode3.set("confidenceScore", upstreamConfidenceScore2);
finegrainedLineageNode3.set(
"downstreamType", instance.textNode(FineGrainedLineageDownstreamType.FIELD_SET.name()));
JsonPatchOperation operation3 =
new AddOperation(
new JsonPointer(
"/fineGrainedLineages/CREATE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c2)/urn:li:query:someQuery/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"),
finegrainedLineageNode3);
List<JsonPatchOperation> patchOperations2 = new ArrayList<>();
patchOperations2.add(operation2);
patchOperations2.add(operation3);
JsonPatch jsonPatch2 = new JsonPatch(patchOperations2);
JsonObjectBuilder finegrainedLineageNode2 = Json.createObjectBuilder();
finegrainedLineageNode2.add(
"upstreamType", Json.createValue(FineGrainedLineageUpstreamType.FIELD_SET.name()));
finegrainedLineageNode2.add("confidenceScore", upstreamConfidenceScore);
finegrainedLineageNode2.add(
"downstreamType", Json.createValue(FineGrainedLineageDownstreamType.FIELD.name()));

JsonPatchBuilder patchOperations2 = Json.createPatchBuilder();
patchOperations2.add(
"/fineGrainedLineages/CREATE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c2)/urn:li:query:someQuery/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)",
finegrainedLineageNode2.build());

JsonValue upstreamConfidenceScore2 = Json.createValue(0.1f);
JsonObjectBuilder finegrainedLineageNode3 = Json.createObjectBuilder();
finegrainedLineageNode3.add(
"upstreamType", Json.createValue(FineGrainedLineageUpstreamType.DATASET.name()));
finegrainedLineageNode3.add("confidenceScore", upstreamConfidenceScore2);
finegrainedLineageNode3.add(
"downstreamType", Json.createValue(FineGrainedLineageDownstreamType.FIELD_SET.name()));

patchOperations2.add(
"/fineGrainedLineages/CREATE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c2)/urn:li:query:someQuery/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)",
finegrainedLineageNode3.build());

JsonPatch jsonPatch2 = patchOperations2.build();

UpstreamLineage result2 = upstreamLineageTemplate.applyPatch(result, jsonPatch2);
// Hack because Jackson parses values to doubles instead of floats
DataMap dataMap2 = new DataMap();

@@ -112,23 +115,22 @@ public class UpstreamLineageTemplateTest {
fineGrainedLineage2.setUpstreamType(FineGrainedLineageUpstreamType.DATASET);
fineGrainedLineage2.setDownstreamType(FineGrainedLineageDownstreamType.FIELD_SET);
fineGrainedLineage2.setQuery(UrnUtils.getUrn("urn:li:query:someQuery"));
Assert.assertEquals(result2.getFineGrainedLineages().get(1), fineGrainedLineage2);
assertEquals(result2.getFineGrainedLineages().get(1), fineGrainedLineage2);

// Check different queries
ObjectNode finegrainedLineageNode4 = instance.objectNode();
finegrainedLineageNode4.set(
"upstreamType", instance.textNode(FineGrainedLineageUpstreamType.FIELD_SET.name()));
finegrainedLineageNode4.set("confidenceScore", upstreamConfidenceScore);
finegrainedLineageNode4.set(
"downstreamType", instance.textNode(FineGrainedLineageDownstreamType.FIELD.name()));
JsonPatchOperation operation4 =
new AddOperation(
new JsonPointer(
"/fineGrainedLineages/CREATE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c2)/urn:li:query:anotherQuery/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"),
finegrainedLineageNode4);
List<JsonPatchOperation> patchOperations3 = new ArrayList<>();
patchOperations3.add(operation4);
JsonPatch jsonPatch3 = new JsonPatch(patchOperations3);
JsonObjectBuilder finegrainedLineageNode4 = Json.createObjectBuilder();
finegrainedLineageNode4.add(
"upstreamType", Json.createValue(FineGrainedLineageUpstreamType.FIELD_SET.name()));
finegrainedLineageNode4.add("confidenceScore", upstreamConfidenceScore);
finegrainedLineageNode4.add(
"downstreamType", Json.createValue(FineGrainedLineageDownstreamType.FIELD.name()));

JsonPatchBuilder patchOperations3 = Json.createPatchBuilder();
patchOperations3.add(
"/fineGrainedLineages/CREATE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c2)/urn:li:query:anotherQuery/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)",
finegrainedLineageNode4.build());

JsonPatch jsonPatch3 = patchOperations3.build();
UpstreamLineage result3 = upstreamLineageTemplate.applyPatch(result2, jsonPatch3);
// Hack because Jackson parses values to doubles instead of floats
DataMap dataMap3 = new DataMap();

@@ -152,23 +154,22 @@ public class UpstreamLineageTemplateTest {
fineGrainedLineage3.setDownstreamType(FineGrainedLineageDownstreamType.FIELD);
fineGrainedLineage3.setQuery(UrnUtils.getUrn("urn:li:query:anotherQuery"));
// Splits into two for different types
Assert.assertEquals(result3.getFineGrainedLineages().get(2), fineGrainedLineage3);
assertEquals(result3.getFineGrainedLineages().get(2), fineGrainedLineage3);

// Check different transform types
ObjectNode finegrainedLineageNode5 = instance.objectNode();
finegrainedLineageNode5.set(
"upstreamType", instance.textNode(FineGrainedLineageUpstreamType.FIELD_SET.name()));
finegrainedLineageNode5.set("confidenceScore", upstreamConfidenceScore);
finegrainedLineageNode5.set(
"downstreamType", instance.textNode(FineGrainedLineageDownstreamType.FIELD.name()));
JsonPatchOperation operation5 =
new AddOperation(
new JsonPointer(
"/fineGrainedLineages/TRANSFORM/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c2)/urn:li:query:anotherQuery/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"),
finegrainedLineageNode5);
List<JsonPatchOperation> patchOperations4 = new ArrayList<>();
patchOperations4.add(operation5);
JsonPatch jsonPatch4 = new JsonPatch(patchOperations4);
JsonObjectBuilder finegrainedLineageNode5 = Json.createObjectBuilder();
finegrainedLineageNode5.add(
"upstreamType", Json.createValue(FineGrainedLineageUpstreamType.FIELD_SET.name()));
finegrainedLineageNode5.add("confidenceScore", upstreamConfidenceScore);
finegrainedLineageNode5.add(
"downstreamType", Json.createValue(FineGrainedLineageDownstreamType.FIELD.name()));

JsonPatchBuilder patchOperations4 = Json.createPatchBuilder();
patchOperations4.add(
"/fineGrainedLineages/TRANSFORM/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c2)/urn:li:query:anotherQuery/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)",
finegrainedLineageNode5.build());
JsonPatch jsonPatch4 = patchOperations4.build();

UpstreamLineage result4 = upstreamLineageTemplate.applyPatch(result3, jsonPatch4);
// Hack because Jackson parses values to doubles instead of floats
DataMap dataMap4 = new DataMap();

@@ -181,33 +182,76 @@ public class UpstreamLineageTemplateTest {
fineGrainedLineage4.setDownstreamType(FineGrainedLineageDownstreamType.FIELD);
fineGrainedLineage4.setQuery(UrnUtils.getUrn("urn:li:query:anotherQuery"));
// New entry in array because of new transformation type
Assert.assertEquals(result4.getFineGrainedLineages().get(3), fineGrainedLineage4);
assertEquals(result4.getFineGrainedLineages().get(3), fineGrainedLineage4);

// Remove
JsonPatchOperation removeOperation =
new RemoveOperation(
new JsonPointer(
"/fineGrainedLineages/CREATE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)/NONE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c1)"));
JsonPatchOperation removeOperation2 =
new RemoveOperation(
new JsonPointer(
"/fineGrainedLineages/CREATE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c2)/urn:li:query:someQuery/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"));
JsonPatchOperation removeOperation3 =
new RemoveOperation(
new JsonPointer(
"/fineGrainedLineages/CREATE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c2)/urn:li:query:anotherQuery/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"));
JsonPatchOperation removeOperation4 =
new RemoveOperation(
new JsonPointer(
"/fineGrainedLineages/TRANSFORM/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c2)/urn:li:query:anotherQuery/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)"));
JsonPatchBuilder removeOperations = Json.createPatchBuilder();
removeOperations.remove(
"/fineGrainedLineages/CREATE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c1)/NONE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c1)");
removeOperations.remove(
"/fineGrainedLineages/CREATE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c2)/urn:li:query:someQuery/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)");
removeOperations.remove(
"/fineGrainedLineages/CREATE/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c2)/urn:li:query:anotherQuery/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)");
removeOperations.remove(
"/fineGrainedLineages/TRANSFORM/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_2,PROD),c2)/urn:li:query:anotherQuery/urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:bigquery,upstream_table_1,PROD),c2)");

List<JsonPatchOperation> removeOperations = new ArrayList<>();
removeOperations.add(removeOperation);
removeOperations.add(removeOperation2);
removeOperations.add(removeOperation3);
removeOperations.add(removeOperation4);
JsonPatch removePatch = new JsonPatch(removeOperations);
JsonPatch removePatch = removeOperations.build();
UpstreamLineage finalResult = upstreamLineageTemplate.applyPatch(result4, removePatch);
Assert.assertEquals(finalResult, upstreamLineageTemplate.getDefault());
assertEquals(finalResult, upstreamLineageTemplate.getDefault());
}

@Test
public void testLargePatchStandard() throws Exception {
// Load patch operations from fixture
String patchStr =
OBJECT_MAPPER
.readTree(
new GzipCompressorInputStream(
this.getClass()
.getResourceAsStream("/patch/large_upstream_lineage_mcp.json.gz")))
.get("aspect")
.get("com.linkedin.pegasus2avro.mxe.GenericAspect")
.get("value")
.asText();

JsonPatchBuilder patchBuilder =
Json.createPatchBuilder(Json.createReader(new StringReader(patchStr)).readArray());

// Overall the patch is a no-op, adding change to assert difference after application
patchBuilder.remove(
"/upstreams/urn:li:dataset:(urn:li:dataPlatform:snowflake,road_curated_nrt.db_3134_dbo.lineitem,PROD)");

JsonPatch jsonPatch = patchBuilder.build();
assertEquals(jsonPatch.toJsonArray().size(), 7491);

// Load existing aspect
String aspectStr =
OBJECT_MAPPER
.readTree(
new GzipCompressorInputStream(
this.getClass()
.getResourceAsStream("/patch/large_upstream_lineage_aspect.json.gz")))
.get("select")
.get(0)
.get("metadata")
.asText();
UpstreamLineage upstreamLineage =
GenericRecordUtils.deserializeAspect(
ByteString.copyString(aspectStr, StandardCharsets.UTF_8), JSON, UpstreamLineage.class);
assertEquals(upstreamLineage.getUpstreams().size(), 188);
assertEquals(upstreamLineage.getFineGrainedLineages().size(), 607);

// Apply patch standard
UpstreamLineageTemplate upstreamLineageTemplate = new UpstreamLineageTemplate();

long start = System.currentTimeMillis();
UpstreamLineage result = upstreamLineageTemplate.applyPatch(upstreamLineage, jsonPatch);
long end = System.currentTimeMillis();
assertTrue(
end - start < 10000,
String.format("Expected less then 10 seconds patch actual %s ms", end - start));

assertEquals(result.getUpstreams().size(), 187, "Expected 1 less upstream");
assertEquals(result.getFineGrainedLineages().size(), 607);
}
}

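The new testLargePatchStandard parses a serialized RFC 6902 patch (a JSON array of operations) from the gzipped fixture and seeds a JsonPatchBuilder with it before appending a remove operation. A minimal sketch of that parse-then-extend pattern, using an inline literal in place of the fixture; the class name and sample paths are illustrative:

import jakarta.json.Json;
import jakarta.json.JsonArray;
import jakarta.json.JsonPatch;
import java.io.StringReader;

// Hypothetical example class, not part of the commit.
public class ParseAndExtendPatch {
  public static void main(String[] args) {
    String patchStr = "[{\"op\":\"add\",\"path\":\"/a\",\"value\":1}]";
    // Read the serialized operations, then keep building on top of them.
    JsonArray ops = Json.createReader(new StringReader(patchStr)).readArray();
    JsonPatch patch = Json.createPatchBuilder(ops).remove("/a").build();
    System.out.println(patch.toJsonArray()); // the parsed add plus the appended remove
  }
}
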
Binary file not shown.
Binary file not shown.
@@ -118,7 +118,8 @@ shadowJar {
relocate 'ch.randelshofer', 'datahub.shaded.ch.randelshofer'
relocate 'io.github.classgraph', 'datahub.shaded.io.github.classgraph'
relocate 'nonapi.io.github.classgraph', 'datahub.shaded.nonapi.io.github.classgraph'
relocate 'com.github.fge', 'datahub.shaded.com.github.fge'
relocate 'org.eclipse.parsson', 'datahub.shaded.parsson'
relocate 'jakarta.json', 'datahub.shaded.json'

finalizedBy checkShadowJar
}

@@ -25,7 +25,8 @@ dependencies {

implementation externalDependency.guava
implementation externalDependency.reflections
implementation externalDependency.jsonPatch

implementation 'com.github.java-json-tools:json-patch:1.13' // TODO: Replace with jakarta.json
api(externalDependency.dgraph4j) {
exclude group: 'com.google.guava', module: 'guava'
exclude group: 'io.grpc', module: 'grpc-protobuf'

@@ -3,7 +3,6 @@ package com.linkedin.metadata.entity.ebean.batch;
import static com.linkedin.metadata.entity.AspectUtils.validateAspect;

import com.datahub.util.exception.ModelConversionException;
import com.github.fge.jsonpatch.JsonPatchException;
import com.linkedin.common.AuditStamp;
import com.linkedin.common.urn.Urn;
import com.linkedin.data.template.RecordTemplate;

@@ -54,7 +53,7 @@ public class ChangeItemImpl implements ChangeMCP {

try {
builder.recordTemplate(genericPatchTemplate.applyPatch(currentValue));
} catch (JsonPatchException | IOException e) {
} catch (IOException e) {
throw new RuntimeException(e);
}

@@ -8,9 +8,6 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.fge.jsonpatch.JsonPatch;
import com.github.fge.jsonpatch.JsonPatchException;
import com.github.fge.jsonpatch.Patch;
import com.linkedin.common.AuditStamp;
import com.linkedin.common.urn.Urn;
import com.linkedin.data.template.RecordTemplate;

@@ -27,7 +24,9 @@ import com.linkedin.metadata.utils.EntityKeyUtils;
import com.linkedin.metadata.utils.SystemMetadataUtils;
import com.linkedin.mxe.MetadataChangeProposal;
import com.linkedin.mxe.SystemMetadata;
import java.io.IOException;
import jakarta.json.Json;
import jakarta.json.JsonPatch;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.util.Objects;
import javax.annotation.Nonnull;

@@ -59,7 +58,7 @@ public class PatchItemImpl implements PatchMCP {
private final SystemMetadata systemMetadata;
private final AuditStamp auditStamp;

private final Patch patch;
private final JsonPatch patch;

private final MetadataChangeProposal metadataChangeProposal;

@@ -108,7 +107,7 @@ public class PatchItemImpl implements PatchMCP {
try {
builder.recordTemplate(
aspectTemplateEngine.applyPatch(currentValue, getPatch(), getAspectSpec()));
} catch (JsonProcessingException | JsonPatchException e) {
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}

@@ -178,12 +177,14 @@ public class PatchItemImpl implements PatchMCP {
.build(entityRegistry);
}

private static Patch convertToJsonPatch(MetadataChangeProposal mcp) {
private static JsonPatch convertToJsonPatch(MetadataChangeProposal mcp) {
JsonNode json;
try {
json = OBJECT_MAPPER.readTree(mcp.getAspect().getValue().asString(StandardCharsets.UTF_8));
return JsonPatch.fromJson(json);
} catch (IOException e) {
return Json.createPatch(
Json.createReader(
new StringReader(mcp.getAspect().getValue().asString(StandardCharsets.UTF_8)))
.readArray());
} catch (RuntimeException e) {
throw new IllegalArgumentException("Invalid JSON Patch: " + mcp.getAspect().getValue(), e);
}
}

@@ -677,8 +677,7 @@ public class EntityController {
@Nonnull AspectSpec aspectSpec,
@Nullable RecordTemplate currentValue,
@Nonnull GenericPatchTemplate<? extends RecordTemplate> genericPatchTemplate,
@Nonnull Actor actor)
throws URISyntaxException {
@Nonnull Actor actor) {
return ChangeItemImpl.fromPatch(
urn,
aspectSpec,

@@ -8,7 +8,7 @@ configurations {
}

dependencies {
implementation externalDependency.jsonPatch
implementation 'com.github.java-json-tools:json-patch:1.13' // TODO: Replace with jakarta.json
implementation project(':entity-registry')
implementation project(':metadata-utils')
implementation project(':metadata-events:mxe-avro')