From 9e281d0ee511f3cb2589f1a882ddd301b798301d Mon Sep 17 00:00:00 2001
From: sonika-shah <58761340+sonika-shah@users.noreply.github.com>
Date: Wed, 18 Jun 2025 14:03:45 +0530
Subject: [PATCH] fix #21394 Custom properties bug update with existing wrong values (#21825)

* fix #21394 allow only updated extension fields to be validated

* fix #21394 add tests

* fix #21394 add tests
---
 .../service/jdbi3/EntityRepository.java       |  90 +++++++----
 .../glossary/GlossaryTermResourceTest.java    | 152 ++++++++++++++++++
 2 files changed, 211 insertions(+), 31 deletions(-)

diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityRepository.java
index ec952732431..83c6819d507 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityRepository.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityRepository.java
@@ -124,6 +124,7 @@ import java.util.function.BiConsumer;
 import java.util.function.BiPredicate;
 import java.util.function.Function;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import java.util.stream.StreamSupport;
 import lombok.Getter;
 import lombok.NonNull;
@@ -1037,7 +1038,7 @@ public abstract class EntityRepository<T extends EntityInterface> {
     validateTags(entity);
     prepare(entity, update);
     setFullyQualifiedName(entity);
-    validateExtension(entity);
+    validateExtension(entity, update);
     // Domain is already validated
   }
 
@@ -1778,11 +1779,37 @@ public abstract class EntityRepository<T extends EntityInterface> {
     daoCollection.entityExtensionTimeSeriesDao().deleteBeforeTimestamp(fqn, extension, timestamp);
   }
 
-  private void validateExtension(T entity) {
+  private void validateExtension(T entity, Entry<String, JsonNode> field) {
     if (entity.getExtension() == null) {
       return;
     }
+    // Validate single extension field
+    JsonNode jsonNode = JsonUtils.valueToTree(entity.getExtension());
+    String fieldName = field.getKey();
+    JsonNode fieldValue = field.getValue();
+
+    JsonSchema jsonSchema = TypeRegistry.instance().getSchema(entityType, fieldName);
+    if (jsonSchema == null) {
+      throw new IllegalArgumentException(CatalogExceptionMessage.unknownCustomField(fieldName));
+    }
+    String customPropertyType = TypeRegistry.getCustomPropertyType(entityType, fieldName);
+    String propertyConfig = TypeRegistry.getCustomPropertyConfig(entityType, fieldName);
+    validateAndUpdateExtensionBasedOnPropertyType(
+        entity, (ObjectNode) jsonNode, fieldName, fieldValue, customPropertyType, propertyConfig);
+    Set<ValidationMessage> validationMessages = jsonSchema.validate(fieldValue);
+    if (!validationMessages.isEmpty()) {
+      throw new IllegalArgumentException(
+          CatalogExceptionMessage.jsonValidationError(fieldName, validationMessages.toString()));
+    }
+  }
+
+  private void validateExtension(T entity, boolean update) {
+    // Validate complete extension field only on POST
+    if (entity.getExtension() == null || update) {
+      return;
+    }
+
     JsonNode jsonNode = JsonUtils.valueToTree(entity.getExtension());
     Iterator<Entry<String, JsonNode>> customFields = jsonNode.fields();
     while (customFields.hasNext()) {
@@ -3235,7 +3262,7 @@ public abstract class EntityRepository<T extends EntityInterface> {
       updateDescription();
       updateDisplayName();
       updateOwners();
-      updateExtension();
+      updateExtension(consolidatingChanges);
       updateTags(
           updated.getFullyQualifiedName(), FIELD_TAGS, original.getTags(), updated.getTags());
       updateDomain();
@@ -3260,7 +3287,7 @@ public abstract class EntityRepository<T extends EntityInterface> {
       updateDescription();
       updateDisplayName();
       updateOwnersForImport();
-      updateExtension();
+      updateExtension(consolidatingChanges);
       updateTagsForImport(
           updated.getFullyQualifiedName(), FIELD_TAGS, original.getTags(), updated.getTags());
       updateDomainForImport();
@@ -3413,7 +3440,7 @@ public abstract class EntityRepository<T extends EntityInterface> {
       applyTags(updatedTags, fqn);
     }
 
-    private void updateExtension() {
+    private void updateExtension(boolean consolidatingChanges) {
       Object origExtension = original.getExtension();
       Object updatedExtension = updated.getExtension();
       if (origExtension == updatedExtension) {
@@ -3433,39 +3460,40 @@ public abstract class EntityRepository<T extends EntityInterface> {
         return;
       }
 
-      List<JsonNode> added = new ArrayList<>();
-      List<JsonNode> deleted = new ArrayList<>();
-      JsonNode origFields = JsonUtils.valueToTree(origExtension);
-      JsonNode updatedFields = JsonUtils.valueToTree(updatedExtension);
+      List<JsonNode> addedFields = new ArrayList<>();
+      List<JsonNode> deletedFields = new ArrayList<>();
+      List<JsonNode> updatedFields = new ArrayList<>();
+      JsonNode origExtensionFields = JsonUtils.valueToTree(origExtension);
+      JsonNode updatedExtensionFields = JsonUtils.valueToTree(updatedExtension);
+      Set<String> allKeys = new HashSet<>();
+      origExtensionFields.fieldNames().forEachRemaining(allKeys::add);
+      updatedExtensionFields.fieldNames().forEachRemaining(allKeys::add);
 
-      // Check for updated and deleted fields
-      for (Iterator<Entry<String, JsonNode>> it = origFields.fields(); it.hasNext(); ) {
-        Entry<String, JsonNode> orig = it.next();
-        JsonNode updatedField = updatedFields.get(orig.getKey());
-        if (updatedField == null) {
-          deleted.add(JsonUtils.getObjectNode(orig.getKey(), orig.getValue()));
-        } else {
-          // TODO converting to a string is a hack for now because JsonNode equals issues
-          recordChange(
-              getExtensionField(orig.getKey()),
-              orig.getValue().toString(),
-              updatedField.toString());
+      for (String key : allKeys) {
+        JsonNode origValue = origExtensionFields.get(key);
+        JsonNode updatedValue = updatedExtensionFields.get(key);
+
+        if (origValue == null) {
+          addedFields.add(JsonUtils.getObjectNode(key, updatedValue));
+        } else if (updatedValue == null) {
+          deletedFields.add(JsonUtils.getObjectNode(key, origValue));
+        } else if (!origValue.equals(updatedValue)) {
+          updatedFields.add(JsonUtils.getObjectNode(key, updatedValue));
+          recordChange(getExtensionField(key), origValue.toString(), updatedValue.toString());
         }
       }
 
-      // Check for added fields
-      for (Iterator<Entry<String, JsonNode>> it = updatedFields.fields(); it.hasNext(); ) {
-        Entry<String, JsonNode> updatedField = it.next();
-        JsonNode orig = origFields.get(updatedField.getKey());
-        if (orig == null) {
-          added.add(JsonUtils.getObjectNode(updatedField.getKey(), updatedField.getValue()));
+      if (!consolidatingChanges) {
+        for (JsonNode node :
+            Stream.of(addedFields, updatedFields, deletedFields).flatMap(List::stream).toList()) {
+          node.fields().forEachRemaining(field -> validateExtension(updated, field));
         }
       }
-      if (!added.isEmpty()) {
-        fieldAdded(changeDescription, FIELD_EXTENSION, JsonUtils.pojoToJson(added));
+      if (!addedFields.isEmpty()) {
+        fieldAdded(changeDescription, FIELD_EXTENSION, JsonUtils.pojoToJson(addedFields));
       }
-      if (!deleted.isEmpty()) {
-        fieldDeleted(changeDescription, FIELD_EXTENSION, JsonUtils.pojoToJson(deleted));
+      if (!deletedFields.isEmpty()) {
+        fieldDeleted(changeDescription, FIELD_EXTENSION, JsonUtils.pojoToJson(deletedFields));
       }
       removeExtension(original);
       storeExtension(updated);
diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryTermResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryTermResourceTest.java
index 7b9886d87f8..10b56a3ba0f 100644
--- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryTermResourceTest.java
+++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryTermResourceTest.java
@@ -43,6 +43,10 @@ import static org.openmetadata.service.util.EntityUtil.toTagLabels;
 import static org.openmetadata.service.util.TestUtils.*;
 import static org.openmetadata.service.util.TestUtils.UpdateType.MINOR_UPDATE;
 
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
 import jakarta.ws.rs.client.WebTarget;
 import jakarta.ws.rs.core.Response;
 import java.io.IOException;
@@ -78,6 +82,7 @@ import org.openmetadata.schema.api.data.CreateGlossaryTerm;
 import org.openmetadata.schema.api.data.CreateTable;
 import org.openmetadata.schema.api.data.TermReference;
 import org.openmetadata.schema.api.feed.ResolveTask;
+import org.openmetadata.schema.entity.Type;
 import org.openmetadata.schema.entity.classification.Classification;
 import org.openmetadata.schema.entity.classification.Tag;
 import org.openmetadata.schema.entity.data.EntityHierarchy;
@@ -86,9 +91,11 @@ import org.openmetadata.schema.entity.data.GlossaryTerm;
 import org.openmetadata.schema.entity.data.GlossaryTerm.Status;
 import org.openmetadata.schema.entity.data.Table;
 import org.openmetadata.schema.entity.feed.Thread;
+import org.openmetadata.schema.entity.type.CustomProperty;
 import org.openmetadata.schema.entity.type.Style;
 import org.openmetadata.schema.type.ChangeDescription;
 import org.openmetadata.schema.type.Column;
+import org.openmetadata.schema.type.CustomPropertyConfig;
 import org.openmetadata.schema.type.EntityReference;
 import org.openmetadata.schema.type.Include;
 import org.openmetadata.schema.type.TagLabel;
@@ -98,11 +105,13 @@ import org.openmetadata.schema.type.api.BulkOperationResult;
 import org.openmetadata.schema.type.api.BulkResponse;
 import org.openmetadata.service.Entity;
 import org.openmetadata.service.governance.workflows.WorkflowHandler;
+import org.openmetadata.service.jdbi3.GlossaryTermRepository;
 import org.openmetadata.service.resources.EntityResourceTest;
 import org.openmetadata.service.resources.databases.TableResourceTest;
 import org.openmetadata.service.resources.feeds.FeedResource.ThreadList;
 import org.openmetadata.service.resources.feeds.FeedResourceTest;
 import org.openmetadata.service.resources.feeds.MessageParser.EntityLink;
+import org.openmetadata.service.resources.metadata.TypeResourceTest;
 import org.openmetadata.service.resources.tags.ClassificationResourceTest;
 import org.openmetadata.service.resources.tags.TagResourceTest;
 import org.openmetadata.service.util.EntityUtil;
@@ -1597,6 +1606,149 @@ public class GlossaryTermResourceTest extends EntityResourceTest<GlossaryTerm, CreateGlossaryTerm> {
+    Map<String, Object> existingExtension =
+        (Map<String, Object>) termWithCorruptData.getExtension();
+    Map<String, Object> extensionUpdate = new HashMap<>(existingExtension);
+    extensionUpdate.put("sensitivity", List.of("internal"));
+    termWithCorruptData.setExtension(extensionUpdate);
+
+    // Patch the entity with our updated extension that preserves the corrupt field
+    GlossaryTerm updatedTerm =
+        patchEntity(term.getId(), json, termWithCorruptData, ADMIN_AUTH_HEADERS);
+
+    // Verify both the corrupt data and new valid property are present after the update
+    JsonNode resultExtension = JsonUtils.valueToTree(updatedTerm.getExtension());
+    assertEquals("wrongValue", resultExtension.get("certified").get(1).asText());
+    assertEquals("internal", resultExtension.get("sensitivity").get(0).asText());
+
+    // SCENARIO 2: Try to modify existing corrupt property by adding new valid value and not
+    // removing the wrong one
+    // This should fail with a 400 error
+    String jsonForWrongUpdate = JsonUtils.pojoToJson(updatedTerm);
+    Map<String, Object> invalidExtension = new HashMap<>(existingExtension);
+    invalidExtension.put("certified", List.of("wrongValue", "official"));
+    invalidExtension.put("sensitivity", List.of("internal"));
+    updatedTerm.setExtension(invalidExtension);
+
+    // Expect a 400 error when trying to update the corrupt property with another wrong value
+    HttpResponseException exception =
+        assertThrows(
+            HttpResponseException.class,
+            () -> patchEntity(term.getId(), jsonForWrongUpdate, updatedTerm, ADMIN_AUTH_HEADERS));
+    assertEquals(400, exception.getStatusCode());
+    assertTrue(
+        exception.getMessage().contains("Values '[wrongValue"),
+        "Error should mention the invalid values");
+
+    // SCENARIO 3: Remove wrong values in corrupt property and add only valid values
+    // This should succeed
+    String jsonForValidUpdate = JsonUtils.pojoToJson(updatedTerm);
+    Map<String, Object> validExtension = new HashMap<>();
+    validExtension.put("certified", List.of("draft", "official"));
+    validExtension.put("sensitivity", List.of("internal"));
+    updatedTerm.setExtension(validExtension);
+
+    ObjectMapper mapper = new ObjectMapper();
+    ArrayNode patchArray = mapper.createArrayNode();
+    ObjectNode patchOp = mapper.createObjectNode();
+    patchOp.put("op", "replace");
+    patchOp.put("path", "/extension");
+    patchOp.set("value", mapper.valueToTree(validExtension));
+    patchArray.add(patchOp);
+
+    // This should succeed since we're replacing the corrupt property with valid values
+    GlossaryTerm properlyUpdatedTerm = patchEntity(term.getId(), patchArray, ADMIN_AUTH_HEADERS);
+
+    // Verify the corrupt data is replaced with valid values
+    JsonNode finalExtension = JsonUtils.valueToTree(properlyUpdatedTerm.getExtension());
+    JsonNode certifiedValues = finalExtension.get("certified");
+    assertNotNull(certifiedValues, "Certified values should exist");
+    assertTrue(certifiedValues.isArray(), "Certified should be an array");
+    assertEquals(2, certifiedValues.size(), "Should contain 2 values");
+  }
+
   public Glossary createGlossary(
       TestInfo test, List<EntityReference> reviewers, List<EntityReference> owners)
       throws IOException {