feat(graphql) Prevent duplicate glossary term names within a group (#8187)
This commit is contained in:
parent 5b9fd977eb
commit 91cac94ed1
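
In short, CreateGlossaryTermResolver now looks up the sibling terms under the requested parent node before creating a new term and rejects the request if one of them already carries that name. A minimal, hypothetical sketch of that guard (the class and helper below are illustrative, not the actual DataHub API; the real resolver gathers sibling names via EntityClient.filter and batchGetV2, as the diff below shows):

import java.util.Set;

// Illustrative only: a simplified stand-in for the duplicate-name guard this commit adds.
final class DuplicateNameGuard {

  // Throws when a sibling glossary term already uses the requested name.
  static void validateName(Set<String> siblingTermNames, String requestedName) {
    if (siblingTermNames.contains(requestedName)) {
      throw new IllegalArgumentException(
          "Glossary Term with this name already exists at this level of the Business Glossary");
    }
  }

  public static void main(String[] args) {
    Set<String> siblings = Set.of("Revenue", "Churn");
    validateName(siblings, "Customer Lifetime Value"); // passes
    validateName(siblings, "Revenue");                 // throws IllegalArgumentException
  }
}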
CreateGlossaryTermResolver.java
@@ -3,6 +3,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary;
import com.linkedin.common.urn.GlossaryNodeUrn;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.data.DataMap;
import com.linkedin.data.template.SetMode;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.exception.AuthorizationException;
@@ -11,10 +12,15 @@ import com.linkedin.datahub.graphql.generated.OwnerEntityType;
import com.linkedin.datahub.graphql.generated.OwnershipType;
import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils;
import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils;
import com.linkedin.entity.EntityResponse;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.glossary.GlossaryTermInfo;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.key.GlossaryTermKey;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.search.SearchEntity;
import com.linkedin.metadata.search.SearchResult;
import com.linkedin.metadata.search.utils.QueryUtils;
import com.linkedin.metadata.utils.EntityKeyUtils;
import com.linkedin.mxe.MetadataChangeProposal;
import graphql.schema.DataFetcher;
@@ -23,8 +29,14 @@ import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

import java.net.URISyntaxException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*;
@@ -36,6 +48,8 @@ import static com.linkedin.metadata.Constants.*;
@RequiredArgsConstructor
public class CreateGlossaryTermResolver implements DataFetcher<CompletableFuture<String>> {

  static final String PARENT_NODE_INDEX_FIELD_NAME = "parentNode.keyword";

  private final EntityClient _entityClient;
  private final EntityService _entityService;

@@ -48,6 +62,8 @@ public class CreateGlossaryTermResolver implements DataFetcher<CompletableFuture

    return CompletableFuture.supplyAsync(() -> {
      if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) {
        // Ensure there isn't another glossary term with the same name at this level of the glossary
        validateGlossaryTermName(parentNode, context, input.getName());
        try {
          final GlossaryTermKey key = new GlossaryTermKey();

@@ -95,4 +111,50 @@ public class CreateGlossaryTermResolver implements DataFetcher<CompletableFuture
    }
    return result;
  }

  private Filter buildParentNodeFilter(final Urn parentNodeUrn) {
    final Map<String, String> criterionMap = new HashMap<>();
    criterionMap.put(PARENT_NODE_INDEX_FIELD_NAME, parentNodeUrn == null ? null : parentNodeUrn.toString());
    return QueryUtils.newFilter(criterionMap);
  }

  private Map<Urn, EntityResponse> getTermsWithSameParent(Urn parentNode, QueryContext context) {
    try {
      final Filter filter = buildParentNodeFilter(parentNode);
      final SearchResult searchResult = _entityClient.filter(
          GLOSSARY_TERM_ENTITY_NAME,
          filter,
          null,
          0,
          1000,
          context.getAuthentication());

      final List<Urn> termUrns = searchResult.getEntities()
          .stream()
          .map(SearchEntity::getEntity)
          .collect(Collectors.toList());

      return _entityClient.batchGetV2(
          GLOSSARY_TERM_ENTITY_NAME,
          new HashSet<>(termUrns),
          Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME),
          context.getAuthentication());
    } catch (Exception e) {
      throw new RuntimeException("Failed fetching Glossary Terms with the same parent", e);
    }
  }

  private void validateGlossaryTermName(Urn parentNode, QueryContext context, String name) {
    Map<Urn, EntityResponse> entities = getTermsWithSameParent(parentNode, context);

    entities.forEach((urn, entityResponse) -> {
      if (entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) {
        DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data();
        GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap);
        if (termInfo.hasName() && termInfo.getName().equals(name)) {
          throw new IllegalArgumentException("Glossary Term with this name already exists at this level of the Business Glossary");
        }
      }
    });
  }
}
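Because the name check runs inside CompletableFuture.supplyAsync, the IllegalArgumentException reaches callers that join() the future as a CompletionException with the original exception as its cause, which is why the test below asserts CompletionException rather than IllegalArgumentException. A small self-contained sketch of that wrapping behavior (plain JDK, no DataHub types):

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;

public class CompletionExceptionDemo {
  public static void main(String[] args) {
    // Stand-in for the resolver body: the validation throws inside the async supplier.
    CompletableFuture<String> future = CompletableFuture.supplyAsync(() -> {
      throw new IllegalArgumentException(
          "Glossary Term with this name already exists at this level of the Business Glossary");
    });

    try {
      future.join();
    } catch (CompletionException e) {
      // join() rethrows the failure wrapped in CompletionException.
      System.out.println(e.getCause().getClass().getSimpleName()); // IllegalArgumentException
    }
  }
}
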
CreateGlossaryTermResolverTest.java
@@ -2,24 +2,40 @@ package com.linkedin.datahub.graphql.resolvers.glossary;

import com.datahub.authentication.Authentication;
import com.linkedin.common.urn.GlossaryNodeUrn;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.generated.CreateGlossaryEntityInput;
import com.linkedin.entity.Aspect;
import com.linkedin.entity.EntityResponse;
import com.linkedin.entity.EnvelopedAspect;
import com.linkedin.entity.EnvelopedAspectMap;
import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.glossary.GlossaryTermInfo;
import com.linkedin.metadata.key.GlossaryTermKey;
import com.linkedin.metadata.search.SearchEntity;
import com.linkedin.metadata.search.SearchEntityArray;
import com.linkedin.metadata.search.SearchResult;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.mxe.MetadataChangeProposal;
import graphql.schema.DataFetchingEnvironment;
import org.mockito.Mockito;
import org.testng.annotations.Test;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CompletionException;

import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext;
import static org.testng.Assert.assertThrows;
import static com.linkedin.metadata.Constants.*;


public class CreateGlossaryTermResolverTest {

  private static final String EXISTING_TERM_URN = "urn:li:glossaryTerm:testing12345";

  private static final CreateGlossaryEntityInput TEST_INPUT = new CreateGlossaryEntityInput(
      "test-id",
      "test-name",
@@ -69,7 +85,7 @@ public class CreateGlossaryTermResolverTest {

  @Test
  public void testGetSuccess() throws Exception {
-    EntityClient mockClient = Mockito.mock(EntityClient.class);
+    EntityClient mockClient = initMockClient();
    EntityService mockService = Mockito.mock(EntityService.class);
    DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
    final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn);
@@ -86,7 +102,7 @@ public class CreateGlossaryTermResolverTest {

  @Test
  public void testGetSuccessNoDescription() throws Exception {
-    EntityClient mockClient = Mockito.mock(EntityClient.class);
+    EntityClient mockClient = initMockClient();
    EntityService mockService = Mockito.mock(EntityService.class);
    DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
    final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn);
@@ -103,7 +119,7 @@ public class CreateGlossaryTermResolverTest {

  @Test
  public void testGetSuccessNoParentNode() throws Exception {
-    EntityClient mockClient = Mockito.mock(EntityClient.class);
+    EntityClient mockClient = initMockClient();
    EntityService mockService = Mockito.mock(EntityService.class);
    DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
    final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null);
@@ -117,4 +133,80 @@ public class CreateGlossaryTermResolverTest {
        Mockito.eq(false)
    );
  }

  @Test
  public void testGetFailureExistingTermSameName() throws Exception {
    EntityClient mockClient = Mockito.mock(EntityClient.class);

    Mockito.when(
        mockClient.filter(
            Mockito.eq(GLOSSARY_TERM_ENTITY_NAME),
            Mockito.any(),
            Mockito.eq(null),
            Mockito.eq(0),
            Mockito.eq(1000),
            Mockito.any()
        )
    ).thenReturn(new SearchResult().setEntities(
        new SearchEntityArray(new SearchEntity().setEntity(UrnUtils.getUrn(EXISTING_TERM_URN)))
    ));

    Map<Urn, EntityResponse> result = new HashMap<>();
    EnvelopedAspectMap map = new EnvelopedAspectMap();
    GlossaryTermInfo termInfo = new GlossaryTermInfo().setName("Duplicated Name");
    map.put(GLOSSARY_TERM_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(termInfo.data())));
    result.put(UrnUtils.getUrn(EXISTING_TERM_URN), new EntityResponse().setAspects(map));

    Mockito.when(
        mockClient.batchGetV2(
            Mockito.eq(GLOSSARY_TERM_ENTITY_NAME),
            Mockito.any(),
            Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)),
            Mockito.any()
        )
    ).thenReturn(result);

    EntityService mockService = Mockito.mock(EntityService.class);
    DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);

    CreateGlossaryEntityInput input = new CreateGlossaryEntityInput(
        "test-id",
        "Duplicated Name",
        "test-description",
        "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"
    );
    setupTest(mockEnv, input, "test-description", parentNodeUrn);
    CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService);
    assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());

    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
        Mockito.any(),
        Mockito.any(Authentication.class)
    );
  }

  private EntityClient initMockClient() throws Exception {
    EntityClient mockClient = Mockito.mock(EntityClient.class);

    Mockito.when(
        mockClient.filter(
            Mockito.eq(GLOSSARY_TERM_ENTITY_NAME),
            Mockito.any(),
            Mockito.eq(null),
            Mockito.eq(0),
            Mockito.eq(1000),
            Mockito.any()
        )
    ).thenReturn(new SearchResult().setEntities(new SearchEntityArray()));
    Mockito.when(
        mockClient.batchGetV2(
            Mockito.eq(GLOSSARY_TERM_ENTITY_NAME),
            Mockito.any(),
            Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)),
            Mockito.any()
        )
    ).thenReturn(new HashMap<>());

    return mockClient;
  }
}
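
Two practical limits of the new check, as implemented in the diff above: the sibling lookup passes 0 and 1000 to EntityClient.filter, which reads like an offset and page size, so extremely large groups may not be checked exhaustively; and the comparison relies on String.equals, so only exact, case-sensitive matches are rejected. A trivial illustration of the latter (made-up values):

public class CaseSensitivityDemo {
  public static void main(String[] args) {
    // Illustrative only: the duplicate check is an exact, case-sensitive string comparison.
    String existingSiblingName = "Revenue";
    System.out.println(existingSiblingName.equals("Revenue")); // true  -> rejected as a duplicate
    System.out.println(existingSiblingName.equals("revenue")); // false -> still allowed
  }
}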