feat(views): apply views to homepage entity counts & recommendations (#10283)

Co-authored-by: gaurav2733 <gaurav.wagh@apptware.com>
ksrinath 2024-04-18 13:50:37 +05:30 committed by GitHub
parent a041a2ee52
commit 91e3dc829e
25 changed files with 224 additions and 49 deletions

View File

@ -55,6 +55,7 @@ import com.linkedin.datahub.graphql.generated.DataPlatformInstance;
import com.linkedin.datahub.graphql.generated.Dataset;
import com.linkedin.datahub.graphql.generated.DatasetStatsSummary;
import com.linkedin.datahub.graphql.generated.Domain;
import com.linkedin.datahub.graphql.generated.ERModelRelationship;
import com.linkedin.datahub.graphql.generated.ERModelRelationshipProperties;
import com.linkedin.datahub.graphql.generated.EntityPath;
import com.linkedin.datahub.graphql.generated.EntityRelationship;
@ -987,8 +988,10 @@ public class GmsGraphQLEngine {
.dataFetcher("listUsers", new ListUsersResolver(this.entityClient))
.dataFetcher("listGroups", new ListGroupsResolver(this.entityClient))
.dataFetcher(
"listRecommendations", new ListRecommendationsResolver(recommendationsService))
.dataFetcher("getEntityCounts", new EntityCountsResolver(this.entityClient))
"listRecommendations",
new ListRecommendationsResolver(recommendationsService, viewService))
.dataFetcher(
"getEntityCounts", new EntityCountsResolver(this.entityClient, viewService))
.dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService))
.dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient))
.dataFetcher(

View File

@ -1,5 +1,6 @@
package com.linkedin.datahub.graphql.resolvers;
import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*;
import static com.linkedin.metadata.Constants.*;
import com.datahub.authentication.Authentication;
@ -7,11 +8,13 @@ import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableSet;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.data.template.StringArray;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.exception.ValidationException;
import com.linkedin.datahub.graphql.generated.AndFilterInput;
import com.linkedin.datahub.graphql.generated.FacetFilterInput;
import com.linkedin.datahub.graphql.resolvers.search.SearchUtils;
import com.linkedin.metadata.query.filter.Condition;
import com.linkedin.metadata.query.filter.ConjunctiveCriterion;
import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray;
@ -20,7 +23,10 @@ import com.linkedin.metadata.query.filter.CriterionArray;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.search.utils.ESUtils;
import com.linkedin.metadata.search.utils.QueryUtils;
import com.linkedin.metadata.service.ViewService;
import com.linkedin.view.DataHubViewInfo;
import graphql.schema.DataFetchingEnvironment;
import io.datahubproject.metadata.context.OperationContext;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
@ -226,4 +232,14 @@ public class ResolverUtils {
}
return QueryUtils.newFilter(urnMatchCriterion);
}
public static Filter viewFilter(
OperationContext opContext, ViewService viewService, String viewUrn) {
if (viewUrn == null) {
return null;
}
DataHubViewInfo viewInfo = resolveView(opContext, viewService, UrnUtils.getUrn(viewUrn));
Filter result = SearchUtils.combineFilters(null, viewInfo.getDefinition().getFilter());
return result;
}
}
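A minimal sketch of the contract of the new viewFilter helper (illustrative code, not part of this diff; opContext, viewService, and the view URN are placeholders): when no view is selected the helper returns null, so downstream searches and counts behave exactly as before; otherwise it yields the filter taken from the View's definition.

    // Illustrative only -- expected behavior of ResolverUtils.viewFilter:
    Filter noView = ResolverUtils.viewFilter(opContext, viewService, null);
    // -> null: the downstream query stays unrestricted
    Filter viewScoped =
        ResolverUtils.viewFilter(opContext, viewService, "urn:li:dataHubView:<view-id>");
    // -> the Filter defined on the selected View, ready to pass into search/count calls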

View File

@ -8,6 +8,7 @@ import com.linkedin.datahub.graphql.generated.EntityCountResult;
import com.linkedin.datahub.graphql.generated.EntityCountResults;
import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.service.ViewService;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import io.opentelemetry.extension.annotations.WithSpan;
@ -20,8 +21,11 @@ public class EntityCountsResolver implements DataFetcher<CompletableFuture<Entit
private final EntityClient _entityClient;
public EntityCountsResolver(final EntityClient entityClient) {
private final ViewService _viewService;
public EntityCountsResolver(final EntityClient entityClient, final ViewService viewService) {
_entityClient = entityClient;
_viewService = viewService;
}
@Override
@ -44,7 +48,8 @@ public class EntityCountsResolver implements DataFetcher<CompletableFuture<Entit
context.getOperationContext(),
input.getTypes().stream()
.map(EntityTypeMapper::getName)
.collect(Collectors.toList()));
.collect(Collectors.toList()),
viewFilter(context.getOperationContext(), _viewService, input.getViewUrn()));
// bind to a result.
List<EntityCountResult> resultList =

View File

@ -22,6 +22,7 @@ import com.linkedin.metadata.query.filter.CriterionArray;
import com.linkedin.metadata.recommendation.EntityRequestContext;
import com.linkedin.metadata.recommendation.RecommendationsService;
import com.linkedin.metadata.recommendation.SearchRequestContext;
import com.linkedin.metadata.service.ViewService;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import io.opentelemetry.extension.annotations.WithSpan;
@ -44,6 +45,7 @@ public class ListRecommendationsResolver
new ListRecommendationsResult(Collections.emptyList());
private final RecommendationsService _recommendationsService;
private final ViewService _viewService;
@WithSpan
@Override
@ -60,6 +62,7 @@ public class ListRecommendationsResolver
_recommendationsService.listRecommendations(
context.getOperationContext(),
mapRequestContext(input.getRequestContext()),
viewFilter(context.getOperationContext(), _viewService, input.getViewUrn()),
input.getLimit());
return ListRecommendationsResult.builder()
.setModules(

View File

@ -1194,6 +1194,11 @@ Input for the get entity counts endpoint
"""
input EntityCountInput {
types: [EntityType!]
"""
Optional - A View to apply when generating results
"""
viewUrn: String
}
"""

View File

@ -23,6 +23,11 @@ input ListRecommendationsInput {
Max number of modules to return
"""
limit: Int
"""
Optional - A View to apply when generating results
"""
viewUrn: String
}
"""

View File

@ -22,6 +22,7 @@ import {
} from '../onboarding/config/HomePageOnboardingConfig';
import { useToggleEducationStepIdsAllowList } from '../onboarding/useToggleEducationStepIdsAllowList';
import { useBusinessAttributesFlag } from '../useAppConfig';
import { useUserContext } from '../context/useUserContext';
const PLATFORMS_MODULE_ID = 'Platforms';
const MOST_POPULAR_MODULE_ID = 'HighUsageEntities';
@ -105,6 +106,9 @@ export const HomePageRecommendations = ({ user }: Props) => {
const browseEntityList = entityRegistry.getBrowseEntityTypes();
const userUrn = user?.urn;
const userContext = useUserContext();
const viewUrn = userContext.localState?.selectedViewUrn;
const businessAttributesFlag = useBusinessAttributesFlag();
const showSimplifiedHomepage = user?.settings?.appearance?.showSimplifiedHomepage;
@ -113,6 +117,7 @@ export const HomePageRecommendations = ({ user }: Props) => {
variables: {
input: {
types: browseEntityList,
viewUrn
},
},
});
@ -133,6 +138,7 @@ export const HomePageRecommendations = ({ user }: Props) => {
scenario,
},
limit: 10,
viewUrn
},
},
fetchPolicy: 'no-cache',

View File

@ -571,9 +571,11 @@ public class JavaEntityClient implements EntityClient {
@Override
@Nonnull
public Map<String, Long> batchGetTotalEntityCount(
@Nonnull OperationContext opContext, @Nonnull List<String> entityNames)
@Nonnull OperationContext opContext,
@Nonnull List<String> entityNames,
@Nullable Filter filter)
throws RemoteInvocationException {
return searchService.docCountPerEntity(opContext, entityNames);
return searchService.docCountPerEntity(opContext, entityNames, filter);
}
/** List all urns existing for a particular Entity type. */

View File

@ -8,6 +8,7 @@ import com.linkedin.metadata.Constants;
import com.linkedin.metadata.datahubusage.DataHubUsageEventConstants;
import com.linkedin.metadata.datahubusage.DataHubUsageEventType;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.recommendation.RecommendationContent;
import com.linkedin.metadata.recommendation.RecommendationRenderType;
import com.linkedin.metadata.recommendation.RecommendationRequestContext;
@ -23,6 +24,7 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.opensearch.action.search.SearchRequest;
@ -99,7 +101,9 @@ public class MostPopularSource implements EntityRecommendationSource {
@Override
@WithSpan
public List<RecommendationContent> getRecommendations(
@Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) {
@Nonnull OperationContext opContext,
@Nonnull RecommendationRequestContext requestContext,
@Nullable Filter filter) {
SearchRequest searchRequest = buildSearchRequest(opContext);
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getMostPopular").time()) {
final SearchResponse searchResponse =

View File

@ -8,6 +8,7 @@ import com.linkedin.metadata.Constants;
import com.linkedin.metadata.datahubusage.DataHubUsageEventConstants;
import com.linkedin.metadata.datahubusage.DataHubUsageEventType;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.recommendation.RecommendationContent;
import com.linkedin.metadata.recommendation.RecommendationRenderType;
import com.linkedin.metadata.recommendation.RecommendationRequestContext;
@ -22,6 +23,7 @@ import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.opensearch.action.search.SearchRequest;
@ -98,7 +100,9 @@ public class RecentlyEditedSource implements EntityRecommendationSource {
@Override
@WithSpan
public List<RecommendationContent> getRecommendations(
@Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) {
@Nonnull OperationContext opContext,
@Nonnull RecommendationRequestContext requestContext,
@Nullable Filter filter) {
SearchRequest searchRequest =
buildSearchRequest(opContext.getSessionActorContext().getActorUrn());
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlyEdited").time()) {

View File

@ -8,6 +8,7 @@ import com.linkedin.metadata.Constants;
import com.linkedin.metadata.datahubusage.DataHubUsageEventConstants;
import com.linkedin.metadata.datahubusage.DataHubUsageEventType;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.recommendation.RecommendationContent;
import com.linkedin.metadata.recommendation.RecommendationRenderType;
import com.linkedin.metadata.recommendation.RecommendationRequestContext;
@ -22,6 +23,7 @@ import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.opensearch.action.search.SearchRequest;
@ -98,7 +100,9 @@ public class RecentlyViewedSource implements EntityRecommendationSource {
@Override
@WithSpan
public List<RecommendationContent> getRecommendations(
@Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) {
@Nonnull OperationContext opContext,
@Nonnull RecommendationRequestContext requestContext,
@Nullable Filter filter) {
SearchRequest searchRequest =
buildSearchRequest(opContext.getSessionActorContext().getActorUrn());
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlyViewed").time()) {

View File

@ -40,13 +40,20 @@ public class SearchService {
public Map<String, Long> docCountPerEntity(
@Nonnull OperationContext opContext, @Nonnull List<String> entityNames) {
return docCountPerEntity(opContext, entityNames, null);
}
public Map<String, Long> docCountPerEntity(
@Nonnull OperationContext opContext,
@Nonnull List<String> entityNames,
@Nullable Filter filter) {
return getEntitiesToSearch(opContext, entityNames, 0).stream()
.collect(
Collectors.toMap(
Function.identity(),
entityName ->
_entityDocCountCache
.getEntityDocCount(opContext)
.getEntityDocCount(opContext, filter)
.getOrDefault(entityName.toLowerCase(), 0L)));
}

View File

@ -3,6 +3,7 @@ package com.linkedin.metadata.search.cache;
import com.google.common.base.Suppliers;
import com.linkedin.metadata.config.cache.EntityDocCountCacheConfiguration;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.search.EntitySearchService;
import com.linkedin.metadata.utils.ConcurrencyUtils;
import io.datahubproject.metadata.context.OperationContext;
@ -15,12 +16,22 @@ import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
public class EntityDocCountCache {
private final EntityRegistry entityRegistry;
private final EntitySearchService entitySearchService;
private final EntityDocCountCacheConfiguration config;
private final Map<String, Supplier<Map<String, Long>>> entityDocCounts;
private final Map<EntityDocCountsKey, Supplier<Map<String, Long>>> entityDocCounts;
@AllArgsConstructor
@EqualsAndHashCode
private static final class EntityDocCountsKey {
private final String searchContextId;
private final Filter filter;
}
public EntityDocCountCache(
EntityRegistry entityRegistry,
@ -32,17 +43,27 @@ public class EntityDocCountCache {
this.entityDocCounts = new ConcurrentHashMap<>();
}
private Map<String, Long> fetchEntityDocCount(@Nonnull OperationContext opContext) {
private Map<String, Long> fetchEntityDocCount(
@Nonnull OperationContext opContext, @Nullable Filter filter) {
return ConcurrencyUtils.transformAndCollectAsync(
entityRegistry.getEntitySpecs().keySet(),
Function.identity(),
Collectors.toMap(Function.identity(), v -> entitySearchService.docCount(opContext, v)));
Collectors.toMap(
Function.identity(), v -> entitySearchService.docCount(opContext, v, filter)));
}
@WithSpan
public Map<String, Long> getEntityDocCount(@Nonnull OperationContext opContext) {
return getEntityDocCount(opContext, null);
}
@WithSpan
public Map<String, Long> getEntityDocCount(
@Nonnull OperationContext opContext, @Nullable Filter filter) {
return entityDocCounts
.computeIfAbsent(opContext.getSearchContextId(), k -> buildSupplier(opContext))
.computeIfAbsent(
new EntityDocCountsKey(opContext.getSearchContextId(), filter),
k -> buildSupplier(opContext, filter))
.get();
}
@ -53,8 +74,9 @@ public class EntityDocCountCache {
.collect(Collectors.toList());
}
private Supplier<Map<String, Long>> buildSupplier(@Nonnull OperationContext opContext) {
private Supplier<Map<String, Long>> buildSupplier(
@Nonnull OperationContext opContext, @Nullable Filter filter) {
return Suppliers.memoizeWithExpiration(
() -> fetchEntityDocCount(opContext), config.getTtlSeconds(), TimeUnit.SECONDS);
() -> fetchEntityDocCount(opContext, filter), config.getTtlSeconds(), TimeUnit.SECONDS);
}
}
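Because the memoized supplier is now keyed by (searchContextId, filter) rather than by search context alone, a view-scoped count neither reuses nor overwrites the unfiltered cache entry. A short usage sketch under that assumption (variable names illustrative):

    // Illustrative only -- the two calls resolve to distinct cache keys and are
    // memoized independently, each with the configured TTL:
    Map<String, Long> unfiltered = entityDocCountCache.getEntityDocCount(opContext);            // key (ctxId, null)
    Map<String, Long> viewScoped = entityDocCountCache.getEntityDocCount(opContext, viewFilter); // key (ctxId, viewFilter)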

View File

@ -81,11 +81,13 @@ public class ElasticSearchService implements EntitySearchService, ElasticSearchI
}
@Override
public long docCount(@Nonnull OperationContext opContext, @Nonnull String entityName) {
public long docCount(
@Nonnull OperationContext opContext, @Nonnull String entityName, @Nullable Filter filter) {
return esSearchDAO.docCount(
opContext.withSearchFlags(
flags -> applyDefaultSearchFlags(flags, null, DEFAULT_SERVICE_SEARCH_FLAGS)),
entityName);
entityName,
filter);
}
@Override

View File

@ -78,12 +78,17 @@ public class ESSearchDAO {
@Nullable private final CustomSearchConfiguration customSearchConfiguration;
public long docCount(@Nonnull OperationContext opContext, @Nonnull String entityName) {
return docCount(opContext, entityName, null);
}
public long docCount(
@Nonnull OperationContext opContext, @Nonnull String entityName, @Nullable Filter filter) {
EntitySpec entitySpec = opContext.getEntityRegistry().getEntitySpec(entityName);
CountRequest countRequest =
new CountRequest(opContext.getSearchContext().getIndexConvention().getIndexName(entitySpec))
.query(
SearchRequestHandler.getFilterQuery(
opContext, null, entitySpec.getSearchableFieldTypes()));
opContext, filter, entitySpec.getSearchableFieldTypes()));
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "docCount").time()) {
return client.count(countRequest, RequestOptions.DEFAULT).getCount();
} catch (IOException e) {

View File

@ -83,6 +83,7 @@ public class RecommendationsServiceTest {
TestOperationContexts.userContextNoSearchAuthorization(
Urn.createFromString("urn:li:corpuser:me")),
new RecommendationRequestContext().setScenario(ScenarioType.HOME),
null,
10);
assertTrue(result.isEmpty());
@ -95,6 +96,7 @@ public class RecommendationsServiceTest {
TestOperationContexts.userContextNoSearchAuthorization(
Urn.createFromString("urn:li:corpuser:me")),
new RecommendationRequestContext().setScenario(ScenarioType.HOME),
null,
10);
assertEquals(result.size(), 1);
RecommendationModule module = result.get(0);
@ -112,6 +114,7 @@ public class RecommendationsServiceTest {
TestOperationContexts.userContextNoSearchAuthorization(
Urn.createFromString("urn:li:corpuser:me")),
new RecommendationRequestContext().setScenario(ScenarioType.HOME),
null,
10);
assertEquals(result.size(), 4);
module = result.get(0);
@ -141,6 +144,7 @@ public class RecommendationsServiceTest {
TestOperationContexts.userContextNoSearchAuthorization(
Urn.createFromString("urn:li:corpuser:me")),
new RecommendationRequestContext().setScenario(ScenarioType.HOME),
null,
2);
assertEquals(result.size(), 2);
module = result.get(0);

View File

@ -1,9 +1,7 @@
package com.linkedin.metadata.recommendation.candidatesource;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
@ -15,6 +13,7 @@ import com.linkedin.common.urn.TestEntityUrn;
import com.linkedin.common.urn.Urn;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.query.filter.Criterion;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.recommendation.RecommendationContent;
import com.linkedin.metadata.recommendation.RecommendationParams;
import com.linkedin.metadata.recommendation.RecommendationRenderType;
@ -38,6 +37,7 @@ public class EntitySearchAggregationCandidateSourceTest {
private EntitySearchAggregationSource valueBasedCandidateSource;
private EntitySearchAggregationSource urnBasedCandidateSource;
private OperationContext opContext;
private Filter filter;
private static final Urn USER = new CorpuserUrn("test");
private static final RecommendationRequestContext CONTEXT =
@ -49,6 +49,7 @@ public class EntitySearchAggregationCandidateSourceTest {
Mockito.reset(entitySearchService);
valueBasedCandidateSource = buildCandidateSource("testValue", false);
urnBasedCandidateSource = buildCandidateSource("testUrn", true);
filter = new Filter();
}
private EntitySearchAggregationSource buildCandidateSource(
@ -97,12 +98,28 @@ public class EntitySearchAggregationCandidateSourceTest {
public void testWhenSearchServiceReturnsEmpty() {
Mockito.when(
entitySearchService.aggregateByValue(
any(OperationContext.class), eq(null), eq("testValue"), eq(null), anyInt()))
any(OperationContext.class),
eq(Collections.emptyList()),
eq("testValue"),
same(filter),
anyInt()))
.thenReturn(Collections.emptyMap());
List<RecommendationContent> candidates =
valueBasedCandidateSource.getRecommendations(opContext, CONTEXT);
valueBasedCandidateSource.getRecommendations(opContext, CONTEXT, filter);
assertTrue(candidates.isEmpty());
assertFalse(valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent());
assertFalse(
valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT, filter).isPresent());
// Mockito's default stub could also return an empty map.
// Adding explicit verification to guard against this.
verify(entitySearchService, times(2))
.aggregateByValue(
any(OperationContext.class),
eq(Collections.emptyList()),
eq("testValue"),
same(filter),
anyInt());
}
@Test
@ -110,10 +127,10 @@ public class EntitySearchAggregationCandidateSourceTest {
// One result
Mockito.when(
entitySearchService.aggregateByValue(
any(OperationContext.class), any(), eq("testValue"), eq(null), anyInt()))
any(OperationContext.class), any(), eq("testValue"), same(filter), anyInt()))
.thenReturn(ImmutableMap.of("value1", 1L));
List<RecommendationContent> candidates =
valueBasedCandidateSource.getRecommendations(opContext, CONTEXT);
valueBasedCandidateSource.getRecommendations(opContext, CONTEXT, filter);
assertEquals(candidates.size(), 1);
RecommendationContent content = candidates.get(0);
assertEquals(content.getValue(), "value1");
@ -128,14 +145,15 @@ public class EntitySearchAggregationCandidateSourceTest {
new Criterion().setField("testValue").setValue("value1"));
assertNotNull(params.getContentParams());
assertEquals(params.getContentParams().getCount().longValue(), 1L);
assertTrue(valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent());
assertTrue(
valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT, filter).isPresent());
// Multiple result
Mockito.when(
entitySearchService.aggregateByValue(
any(OperationContext.class), any(), eq("testValue"), eq(null), anyInt()))
any(OperationContext.class), any(), eq("testValue"), same(filter), anyInt()))
.thenReturn(ImmutableMap.of("value1", 1L, "value2", 2L, "value3", 3L));
candidates = valueBasedCandidateSource.getRecommendations(opContext, CONTEXT);
candidates = valueBasedCandidateSource.getRecommendations(opContext, CONTEXT, filter);
assertEquals(candidates.size(), 2);
content = candidates.get(0);
assertEquals(content.getValue(), "value3");
@ -163,7 +181,8 @@ public class EntitySearchAggregationCandidateSourceTest {
new Criterion().setField("testValue").setValue("value2"));
assertNotNull(params.getContentParams());
assertEquals(params.getContentParams().getCount().longValue(), 2L);
assertTrue(valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent());
assertTrue(
valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT, filter).isPresent());
}
@Test
@ -174,10 +193,10 @@ public class EntitySearchAggregationCandidateSourceTest {
Urn testUrn3 = new TestEntityUrn("testUrn3", "testUrn3", "testUrn3");
Mockito.when(
entitySearchService.aggregateByValue(
any(OperationContext.class), any(), eq("testUrn"), eq(null), anyInt()))
any(OperationContext.class), any(), eq("testUrn"), same(filter), anyInt()))
.thenReturn(ImmutableMap.of(testUrn1.toString(), 1L));
List<RecommendationContent> candidates =
urnBasedCandidateSource.getRecommendations(opContext, CONTEXT);
urnBasedCandidateSource.getRecommendations(opContext, CONTEXT, filter);
assertEquals(candidates.size(), 1);
RecommendationContent content = candidates.get(0);
assertEquals(content.getValue(), testUrn1.toString());
@ -192,16 +211,17 @@ public class EntitySearchAggregationCandidateSourceTest {
new Criterion().setField("testUrn").setValue(testUrn1.toString()));
assertNotNull(params.getContentParams());
assertEquals(params.getContentParams().getCount().longValue(), 1L);
assertTrue(urnBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent());
assertTrue(
urnBasedCandidateSource.getRecommendationModule(opContext, CONTEXT, filter).isPresent());
// Multiple result
Mockito.when(
entitySearchService.aggregateByValue(
any(OperationContext.class), any(), eq("testUrn"), eq(null), anyInt()))
any(OperationContext.class), any(), eq("testUrn"), same(filter), anyInt()))
.thenReturn(
ImmutableMap.of(
testUrn1.toString(), 1L, testUrn2.toString(), 2L, testUrn3.toString(), 3L));
candidates = urnBasedCandidateSource.getRecommendations(opContext, CONTEXT);
candidates = urnBasedCandidateSource.getRecommendations(opContext, CONTEXT, filter);
assertEquals(candidates.size(), 2);
content = candidates.get(0);
assertEquals(content.getValue(), testUrn3.toString());
@ -229,6 +249,7 @@ public class EntitySearchAggregationCandidateSourceTest {
new Criterion().setField("testUrn").setValue(testUrn2.toString()));
assertNotNull(params.getContentParams());
assertEquals(params.getContentParams().getCount().longValue(), 2L);
assertTrue(urnBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent());
assertTrue(
urnBasedCandidateSource.getRecommendationModule(opContext, CONTEXT, filter).isPresent());
}
}

View File

@ -1,11 +1,13 @@
package com.linkedin.metadata.recommendation.candidatesource;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.recommendation.RecommendationContent;
import com.linkedin.metadata.recommendation.RecommendationRenderType;
import com.linkedin.metadata.recommendation.RecommendationRequestContext;
import io.datahubproject.metadata.context.OperationContext;
import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
@ -42,7 +44,9 @@ public class TestSource implements RecommendationSource {
@Override
public List<RecommendationContent> getRecommendations(
@Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) {
@Nonnull OperationContext opContext,
@Nonnull RecommendationRequestContext requestContext,
@Nullable Filter filter) {
return contents;
}
}

View File

@ -393,9 +393,17 @@ public interface EntityClient {
@Nonnull
Map<String, Long> batchGetTotalEntityCount(
@Nonnull OperationContext opContext, @Nonnull List<String> entityName)
@Nonnull OperationContext opContext,
@Nonnull List<String> entityName,
@Nullable Filter filter)
throws RemoteInvocationException;
default Map<String, Long> batchGetTotalEntityCount(
@Nonnull OperationContext opContext, @Nonnull List<String> entityName)
throws RemoteInvocationException {
return batchGetTotalEntityCount(opContext, entityName, null);
}
/** List all urns existing for a particular Entity type. */
ListUrnsResult listUrns(
@Nonnull OperationContext opContext,

View File

@ -772,8 +772,14 @@ public class RestliEntityClient extends BaseClient implements EntityClient {
@Override
@Nonnull
public Map<String, Long> batchGetTotalEntityCount(
@Nonnull OperationContext opContext, @Nonnull List<String> entityName)
@Nonnull OperationContext opContext,
@Nonnull List<String> entityName,
@Nullable Filter filter)
throws RemoteInvocationException {
if (filter != null) {
throw new UnsupportedOperationException("Filter not yet supported in restli-client.");
}
EntitiesDoBatchGetTotalEntityCountRequestBuilder requestBuilder =
ENTITIES_REQUEST_BUILDERS
.actionBatchGetTotalEntityCount()

View File

@ -1,5 +1,6 @@
package com.linkedin.metadata.recommendation;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.recommendation.candidatesource.RecommendationSource;
import com.linkedin.metadata.recommendation.ranker.RecommendationModuleRanker;
import com.linkedin.metadata.utils.ConcurrencyUtils;
@ -10,6 +11,7 @@ import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@ -57,6 +59,7 @@ public class RecommendationsService {
public List<RecommendationModule> listRecommendations(
@Nonnull OperationContext opContext,
@Nonnull RecommendationRequestContext requestContext,
@Nullable Filter filter,
int limit) {
// Get recommendation candidates from sources which are eligible, in parallel
@ -65,7 +68,7 @@ public class RecommendationsService {
_candidateSources.stream()
.filter(source -> source.isEligible(opContext, requestContext))
.collect(Collectors.toList()),
source -> source.getRecommendationModule(opContext, requestContext),
source -> source.getRecommendationModule(opContext, requestContext, filter),
(source, exception) -> {
log.error(
"Error while fetching candidate modules from source {}", source, exception);

View File

@ -6,6 +6,7 @@ import com.linkedin.metadata.models.EntitySpec;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.query.filter.Criterion;
import com.linkedin.metadata.query.filter.CriterionArray;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.recommendation.ContentParams;
import com.linkedin.metadata.recommendation.RecommendationContent;
import com.linkedin.metadata.recommendation.RecommendationParams;
@ -72,10 +73,16 @@ public abstract class EntitySearchAggregationSource implements RecommendationSou
@Override
@WithSpan
public List<RecommendationContent> getRecommendations(
@Nonnull OperationContext opContext, @Nullable RecommendationRequestContext requestContext) {
@Nonnull OperationContext opContext,
@Nullable RecommendationRequestContext requestContext,
@Nullable Filter filter) {
Map<String, Long> aggregationResult =
entitySearchService.aggregateByValue(
opContext, getEntityNames(entityRegistry), getSearchFieldName(), null, getMaxContent());
opContext,
getEntityNames(entityRegistry),
getSearchFieldName(),
filter,
getMaxContent());
if (aggregationResult.isEmpty()) {
return Collections.emptyList();

View File

@ -5,6 +5,7 @@ import com.datahub.util.exception.ESQueryException;
import com.linkedin.common.urn.Urn;
import com.linkedin.metadata.datahubusage.DataHubUsageEventConstants;
import com.linkedin.metadata.datahubusage.DataHubUsageEventType;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.recommendation.RecommendationContent;
import com.linkedin.metadata.recommendation.RecommendationParams;
import com.linkedin.metadata.recommendation.RecommendationRenderType;
@ -19,6 +20,7 @@ import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.opensearch.action.search.SearchRequest;
@ -78,7 +80,9 @@ public class RecentlySearchedSource implements RecommendationSource {
@Override
public List<RecommendationContent> getRecommendations(
@Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) {
@Nonnull OperationContext opContext,
@Nonnull RecommendationRequestContext requestContext,
@Nullable Filter filter) {
SearchRequest searchRequest =
buildSearchRequest(opContext.getSessionActorContext().getActorUrn());
try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlySearched").time()) {

View File

@ -1,5 +1,6 @@
package com.linkedin.metadata.recommendation.candidatesource;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.recommendation.RecommendationContent;
import com.linkedin.metadata.recommendation.RecommendationContentArray;
import com.linkedin.metadata.recommendation.RecommendationModule;
@ -10,6 +11,7 @@ import io.opentelemetry.extension.annotations.WithSpan;
import java.util.List;
import java.util.Optional;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/** Base interface for defining a candidate source for recommendation module */
public interface RecommendationSource {
@ -42,7 +44,15 @@ public interface RecommendationSource {
*/
@WithSpan
List<RecommendationContent> getRecommendations(
@Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext);
@Nonnull OperationContext opContext,
@Nonnull RecommendationRequestContext requestContext,
@Nullable Filter filter);
// retaining this for backward compatibility
default List<RecommendationContent> getRecommendations(
@Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) {
return getRecommendations(opContext, requestContext, null);
}
/**
* Get the full recommendations module itself provided the request context.
@ -52,12 +62,15 @@ public interface RecommendationSource {
* @return list of recommendation candidates
*/
default Optional<RecommendationModule> getRecommendationModule(
@Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) {
@Nonnull OperationContext opContext,
@Nonnull RecommendationRequestContext requestContext,
@Nullable Filter filter) {
if (!isEligible(opContext, requestContext)) {
return Optional.empty();
}
List<RecommendationContent> recommendations = getRecommendations(opContext, requestContext);
List<RecommendationContent> recommendations =
getRecommendations(opContext, requestContext, filter);
if (recommendations.isEmpty()) {
return Optional.empty();
}
@ -69,4 +82,10 @@ public interface RecommendationSource {
.setRenderType(getRenderType())
.setContent(new RecommendationContentArray(recommendations)));
}
// retaining this for backward compatibility
default Optional<RecommendationModule> getRecommendationModule(
@Nonnull OperationContext opContext, @Nonnull RecommendationRequestContext requestContext) {
return getRecommendationModule(opContext, requestContext, null);
}
}
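With the nullable filter threaded through, implementations only need to provide the three-argument getRecommendations; the two-argument defaults above keep existing call sites compiling by forwarding a null filter. A hedged call-side sketch (names illustrative):

    // Illustrative only:
    source.getRecommendations(opContext, requestContext);          // legacy path, filter defaults to null
    source.getRecommendations(opContext, requestContext, filter);  // view-aware path used by the resolvers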

View File

@ -24,8 +24,14 @@ public interface EntitySearchService {
* Get the number of documents corresponding to the entity
*
* @param entityName name of the entity
* @param filter optional filter
*/
long docCount(@Nonnull OperationContext opContext, @Nonnull String entityName);
long docCount(
@Nonnull OperationContext opContext, @Nonnull String entityName, @Nullable Filter filter);
default long docCount(@Nonnull OperationContext opContext, @Nonnull String entityName) {
return docCount(opContext, entityName, null);
}
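The same backward-compatibility pattern applies here: the two-argument overload delegates with a null filter, so only view-aware callers see restricted counts. A brief usage sketch (assumed caller; entity name illustrative):

    // Illustrative only:
    long total = entitySearchService.docCount(opContext, "dataset");              // unfiltered count
    long inView = entitySearchService.docCount(opContext, "dataset", viewFilter); // only docs matching the view filter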
/**
* Updates or inserts the given search document.