refactor(misc): testng Java fix, system restli client, cache key fix, e… (#8926)

david-leifker 2023-09-30 22:47:59 -05:00 committed by GitHub
parent 4d9a7ce7c9
commit b61c38ab05
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
38 changed files with 402 additions and 181 deletions

View File

@ -291,7 +291,7 @@ subprojects {
maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1
if (project.configurations.getByName("testImplementation").getDependencies()
.any{ it.getName() == "testng" }) {
.any{ it.getName().contains("testng") }) {
useTestNG()
}
}

View File

@ -31,7 +31,7 @@ public class IndexUtils {
List<ReindexConfig> reindexConfigs = new ArrayList<>(_reindexConfigs);
if (reindexConfigs.isEmpty()) {
for (ElasticSearchIndexed elasticSearchIndexed : elasticSearchIndexedList) {
reindexConfigs.addAll(elasticSearchIndexed.getReindexConfigs());
reindexConfigs.addAll(elasticSearchIndexed.buildReindexConfigs());
}
_reindexConfigs = new ArrayList<>(reindexConfigs);
}

View File

@ -6,6 +6,7 @@ import com.linkedin.metadata.graph.GraphService;
import com.linkedin.metadata.models.registry.ConfigEntityRegistry;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.search.SearchService;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders;
import io.ebean.Database;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.test.mock.mockito.MockBean;
@ -35,4 +36,7 @@ public class UpgradeCliApplicationTestConfiguration {
@MockBean
ConfigEntityRegistry configEntityRegistry;
@MockBean
public EntityIndexBuilders entityIndexBuilders;
}

View File

@ -8,6 +8,7 @@ import com.linkedin.metadata.utils.metrics.MetricUtils;
import io.ebean.PagedList;
import io.ebean.Transaction;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.sql.Timestamp;
@ -103,6 +104,9 @@ public interface AspectDao {
@Nonnull
PagedList<EbeanAspectV2> getPagedAspects(final RestoreIndicesArgs args);
@Nonnull
Stream<EntityAspect> streamAspects(String entityName, String aspectName);
int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn);
@Nonnull

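A minimal sketch of how a caller might consume the new streamAspects API; the DAO variable is illustrative, and the corp user constants are the same ones used by the Ebean test later in this diff:

    // Stream the latest version of a single aspect across every urn of an entity type,
    // e.g. to re-project documents without paging through getPagedAspects.
    try (Stream<EntityAspect> aspects = aspectDao.streamAspects(CORP_USER_ENTITY_NAME, CORP_USER_KEY_ASPECT_NAME)) {
        Set<String> urns = aspects.map(EntityAspect::getUrn).collect(Collectors.toSet());
    }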
View File

@ -3,6 +3,7 @@ package com.linkedin.metadata.entity;
import com.codahale.metrics.Timer;
import com.linkedin.data.template.GetMode;
import com.linkedin.data.template.SetMode;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.metadata.config.PreProcessHooks;
import com.datahub.util.RecordUtils;
import com.datahub.util.exception.ModelConversionException;
@ -93,6 +94,7 @@ import javax.annotation.Nullable;
import javax.persistence.EntityNotFoundException;
import io.ebean.Transaction;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import static com.linkedin.metadata.Constants.*;
@ -144,11 +146,11 @@ public class EntityServiceImpl implements EntityService {
private final Map<String, Set<String>> _entityToValidAspects;
private RetentionService _retentionService;
private final Boolean _alwaysEmitChangeLog;
@Getter
private final UpdateIndicesService _updateIndicesService;
private final PreProcessHooks _preProcessHooks;
protected static final int MAX_KEYS_PER_QUERY = 500;
private final Integer ebeanMaxTransactionRetry;
public EntityServiceImpl(
@ -180,6 +182,11 @@ public class EntityServiceImpl implements EntityService {
ebeanMaxTransactionRetry = retry != null ? retry : DEFAULT_MAX_TRANSACTION_RETRY;
}
@Override
public void setSystemEntityClient(SystemEntityClient systemEntityClient) {
this._updateIndicesService.setSystemEntityClient(systemEntityClient);
}
/**
* Retrieves the latest aspects corresponding to a batch of {@link Urn}s based on a provided
* set of aspect names.

View File

@ -41,6 +41,7 @@ import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
@ -445,6 +446,12 @@ public class CassandraAspectDao implements AspectDao, AspectMigrationsDao {
return null;
}
@Nonnull
@Override
public Stream<EntityAspect> streamAspects(String entityName, String aspectName) {
// Not implemented
return null;
}
@Override
@Nonnull

View File

@ -42,6 +42,7 @@ import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
@ -433,6 +434,18 @@ public class EbeanAspectDao implements AspectDao, AspectMigrationsDao {
.findPagedList();
}
@Override
@Nonnull
public Stream<EntityAspect> streamAspects(String entityName, String aspectName) {
ExpressionList<EbeanAspectV2> exp = _server.find(EbeanAspectV2.class)
.select(EbeanAspectV2.ALL_COLUMNS)
.where()
.eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION)
.eq(EbeanAspectV2.ASPECT_COLUMN, aspectName)
.like(EbeanAspectV2.URN_COLUMN, "urn:li:" + entityName + ":%");
return exp.query().findStream().map(EbeanAspectV2::toEntityAspect);
}
@Override
@Nonnull
public Iterable<String> listAllUrns(int start, int pageSize) {

View File

@ -318,7 +318,7 @@ public class ElasticSearchGraphService implements GraphService, ElasticSearchIndexed
public void configure() {
log.info("Setting up elastic graph index");
try {
for (ReindexConfig config : getReindexConfigs()) {
for (ReindexConfig config : buildReindexConfigs()) {
_indexBuilder.buildIndex(config);
}
} catch (IOException e) {
@ -327,7 +327,7 @@ public class ElasticSearchGraphService implements GraphService, ElasticSearchInd
}
@Override
public List<ReindexConfig> getReindexConfigs() throws IOException {
public List<ReindexConfig> buildReindexConfigs() throws IOException {
return List.of(_indexBuilder.buildReindexState(_indexConvention.getIndexName(INDEX_NAME),
GraphRelationshipMappingsBuilder.getMappings(), Collections.emptyMap()));
}

View File

@ -46,8 +46,8 @@ public class ElasticSearchService implements EntitySearchService, ElasticSearchIndexed
}
@Override
public List<ReindexConfig> getReindexConfigs() {
return indexBuilders.getReindexConfigs();
public List<ReindexConfig> buildReindexConfigs() {
return indexBuilders.buildReindexConfigs();
}
@Override

View File

@ -206,12 +206,7 @@ public class ESIndexBuilder {
// no need to reindex and only new mappings or dynamic settings
// Just update the additional mappings
if (indexState.isPureMappingsAddition()) {
log.info("Updating index {} mappings in place.", indexState.name());
PutMappingRequest request = new PutMappingRequest(indexState.name()).source(indexState.targetMappings());
_searchClient.indices().putMapping(request, RequestOptions.DEFAULT);
log.info("Updated index {} with new mappings", indexState.name());
}
applyMappings(indexState, true);
if (indexState.requiresApplySettings()) {
UpdateSettingsRequest request = new UpdateSettingsRequest(indexState.name());
@ -234,6 +229,26 @@ public class ESIndexBuilder {
}
}
/**
* Apply mappings changes if reindex is not required
* @param indexState the state of the current and target index settings/mappings
* @param suppressError during reindex logic this is not an error, for structured properties it is an error
* @throws IOException communication issues with ES
*/
public void applyMappings(ReindexConfig indexState, boolean suppressError) throws IOException {
if (indexState.isPureMappingsAddition()) {
log.info("Updating index {} mappings in place.", indexState.name());
PutMappingRequest request = new PutMappingRequest(indexState.name()).source(indexState.targetMappings());
_searchClient.indices().putMapping(request, RequestOptions.DEFAULT);
log.info("Updated index {} with new mappings", indexState.name());
} else {
if (!suppressError) {
log.error("Attempted to apply invalid mappings. Current: {} Target: {}", indexState.currentMappings(),
indexState.targetMappings());
}
}
}
public String reindexInPlaceAsync(String indexAlias, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options, ReindexConfig config)
throws Exception {
GetAliasesResponse aliasesResponse = _searchClient.indices().getAlias(

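A minimal sketch of calling the extracted applyMappings helper outside the buildIndex flow, where a non-additive mapping change should surface as an error (suppressError = false); the index name and mapping/settings maps are assumptions:

    ReindexConfig state = indexBuilder.buildReindexState(indexName, targetMappings, targetSettings);
    // Pure mapping additions are applied in place via a PutMappingRequest; anything else is
    // logged as an error rather than silently requiring a reindex.
    indexBuilder.applyMappings(state, false);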
View File

@ -1,35 +0,0 @@
package com.linkedin.metadata.search.elasticsearch.indexbuilder;
import com.linkedin.metadata.models.EntitySpec;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import com.linkedin.metadata.shared.ElasticSearchIndexed;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@RequiredArgsConstructor
public class EntityIndexBuilder implements ElasticSearchIndexed {
private final ESIndexBuilder indexBuilder;
private final EntitySpec entitySpec;
private final SettingsBuilder settingsBuilder;
private final String indexName;
@Override
public void reindexAll() throws IOException {
log.info("Setting up index: {}", indexName);
for (ReindexConfig config : getReindexConfigs()) {
indexBuilder.buildIndex(config);
}
}
@Override
public List<ReindexConfig> getReindexConfigs() throws IOException {
Map<String, Object> mappings = MappingsBuilder.getMappings(entitySpec);
Map<String, Object> settings = settingsBuilder.getSettings();
return List.of(indexBuilder.buildReindexState(indexName, mappings, settings));
}
}

View File

@ -3,8 +3,10 @@ package com.linkedin.metadata.search.elasticsearch.indexbuilder;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.shared.ElasticSearchIndexed;
import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
@ -14,32 +16,37 @@ import lombok.extern.slf4j.Slf4j;
@RequiredArgsConstructor
@Slf4j
public class EntityIndexBuilders implements ElasticSearchIndexed {
private final ESIndexBuilder indexBuilder;
private final EntityRegistry entityRegistry;
private final IndexConvention indexConvention;
private final SettingsBuilder settingsBuilder;
private final ESIndexBuilder indexBuilder;
private final EntityRegistry entityRegistry;
private final IndexConvention indexConvention;
private final SettingsBuilder settingsBuilder;
@Override
public void reindexAll() {
for (ReindexConfig config : getReindexConfigs()) {
try {
indexBuilder.buildIndex(config);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
public ESIndexBuilder getIndexBuilder() {
return indexBuilder;
}
@Override
public List<ReindexConfig> getReindexConfigs() {
return entityRegistry.getEntitySpecs().values().stream().flatMap(entitySpec -> {
try {
return new EntityIndexBuilder(indexBuilder, entitySpec, settingsBuilder, indexConvention.getIndexName(entitySpec))
.getReindexConfigs().stream();
} catch (IOException e) {
@Override
public void reindexAll() {
for (ReindexConfig config : buildReindexConfigs()) {
try {
indexBuilder.buildIndex(config);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
@Override
public List<ReindexConfig> buildReindexConfigs() {
Map<String, Object> settings = settingsBuilder.getSettings();
return entityRegistry.getEntitySpecs().values().stream().map(entitySpec -> {
try {
Map<String, Object> mappings = MappingsBuilder.getMappings(entitySpec);
return indexBuilder.buildReindexState(indexConvention.getIndexName(entitySpec), mappings, settings);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
).collect(Collectors.toList());
}
}
).collect(Collectors.toList());
}
}

View File

@ -51,6 +51,8 @@ public class MappingsBuilder {
public static final String ALIAS = "alias";
public static final String PATH = "path";
public static final String PROPERTIES = "properties";
private MappingsBuilder() {
}
@ -66,7 +68,7 @@ public class MappingsBuilder {
mappings.put("urn", getMappingsForUrn());
mappings.put("runId", getMappingsForRunId());
return ImmutableMap.of("properties", mappings);
return ImmutableMap.of(PROPERTIES, mappings);
}
private static Map<String, Object> getMappingsForUrn() {
@ -98,42 +100,9 @@ public class MappingsBuilder {
Map<String, Object> mappings = new HashMap<>();
Map<String, Object> mappingForField = new HashMap<>();
if (fieldType == FieldType.KEYWORD) {
mappingForField.put(TYPE, KEYWORD);
mappingForField.put(NORMALIZER, KEYWORD_NORMALIZER);
// Add keyword subfield without lowercase filter
mappingForField.put(FIELDS, ImmutableMap.of(KEYWORD, KEYWORD_TYPE_MAP));
mappingForField.putAll(getMappingsForKeyword());
} else if (fieldType == FieldType.TEXT || fieldType == FieldType.TEXT_PARTIAL || fieldType == FieldType.WORD_GRAM) {
mappingForField.put(TYPE, KEYWORD);
mappingForField.put(NORMALIZER, KEYWORD_NORMALIZER);
Map<String, Object> subFields = new HashMap<>();
if (fieldType == FieldType.TEXT_PARTIAL || fieldType == FieldType.WORD_GRAM) {
subFields.put(NGRAM, getPartialNgramConfigWithOverrides(
ImmutableMap.of(
ANALYZER, PARTIAL_ANALYZER
)
));
if (fieldType == FieldType.WORD_GRAM) {
for (Map.Entry<String, String> entry : Map.of(
WORD_GRAMS_LENGTH_2, WORD_GRAM_2_ANALYZER,
WORD_GRAMS_LENGTH_3, WORD_GRAM_3_ANALYZER,
WORD_GRAMS_LENGTH_4, WORD_GRAM_4_ANALYZER).entrySet()) {
String fieldName = entry.getKey();
String analyzerName = entry.getValue();
subFields.put(fieldName, ImmutableMap.of(
TYPE, TEXT,
ANALYZER, analyzerName
));
}
}
}
subFields.put(DELIMITED, ImmutableMap.of(
TYPE, TEXT,
ANALYZER, TEXT_ANALYZER,
SEARCH_ANALYZER, TEXT_SEARCH_ANALYZER,
SEARCH_QUOTE_ANALYZER, CUSTOM_QUOTE_ANALYZER));
// Add keyword subfield without lowercase filter
subFields.put(KEYWORD, KEYWORD_TYPE_MAP);
mappingForField.put(FIELDS, subFields);
mappingForField.putAll(getMappingsForSearchText(fieldType));
} else if (fieldType == FieldType.BROWSE_PATH) {
mappingForField.put(TYPE, TEXT);
mappingForField.put(FIELDS,
@ -189,6 +158,51 @@ public class MappingsBuilder {
return mappings;
}
private static Map<String, Object> getMappingsForKeyword() {
Map<String, Object> mappingForField = new HashMap<>();
mappingForField.put(TYPE, KEYWORD);
mappingForField.put(NORMALIZER, KEYWORD_NORMALIZER);
// Add keyword subfield without lowercase filter
mappingForField.put(FIELDS, ImmutableMap.of(KEYWORD, KEYWORD_TYPE_MAP));
return mappingForField;
}
private static Map<String, Object> getMappingsForSearchText(FieldType fieldType) {
Map<String, Object> mappingForField = new HashMap<>();
mappingForField.put(TYPE, KEYWORD);
mappingForField.put(NORMALIZER, KEYWORD_NORMALIZER);
Map<String, Object> subFields = new HashMap<>();
if (fieldType == FieldType.TEXT_PARTIAL || fieldType == FieldType.WORD_GRAM) {
subFields.put(NGRAM, getPartialNgramConfigWithOverrides(
ImmutableMap.of(
ANALYZER, PARTIAL_ANALYZER
)
));
if (fieldType == FieldType.WORD_GRAM) {
for (Map.Entry<String, String> entry : Map.of(
WORD_GRAMS_LENGTH_2, WORD_GRAM_2_ANALYZER,
WORD_GRAMS_LENGTH_3, WORD_GRAM_3_ANALYZER,
WORD_GRAMS_LENGTH_4, WORD_GRAM_4_ANALYZER).entrySet()) {
String fieldName = entry.getKey();
String analyzerName = entry.getValue();
subFields.put(fieldName, ImmutableMap.of(
TYPE, TEXT,
ANALYZER, analyzerName
));
}
}
}
subFields.put(DELIMITED, ImmutableMap.of(
TYPE, TEXT,
ANALYZER, TEXT_ANALYZER,
SEARCH_ANALYZER, TEXT_SEARCH_ANALYZER,
SEARCH_QUOTE_ANALYZER, CUSTOM_QUOTE_ANALYZER));
// Add keyword subfield without lowercase filter
subFields.put(KEYWORD, KEYWORD_TYPE_MAP);
mappingForField.put(FIELDS, subFields);
return mappingForField;
}
private static Map<String, Object> getMappingsForSearchScoreField(
@Nonnull final SearchScoreFieldSpec searchScoreFieldSpec) {
return ImmutableMap.of(searchScoreFieldSpec.getSearchScoreAnnotation().getFieldName(),

View File

@ -121,13 +121,14 @@ public class ReindexConfig {
if (super.exists) {
/* Consider mapping changes */
MapDifference<String, Object> mappingsDiff = Maps.difference(
(TreeMap<String, Object>) super.currentMappings.getOrDefault("properties", new TreeMap()),
(TreeMap<String, Object>) super.targetMappings.getOrDefault("properties", new TreeMap()));
getOrDefault(super.currentMappings, List.of("properties")),
getOrDefault(super.targetMappings, List.of("properties")));
super.requiresApplyMappings = !mappingsDiff.entriesDiffering().isEmpty()
|| !mappingsDiff.entriesOnlyOnRight().isEmpty();
super.isPureMappingsAddition = super.requiresApplyMappings
&& mappingsDiff.entriesDiffering().isEmpty()
&& !mappingsDiff.entriesOnlyOnRight().isEmpty();
if (super.requiresApplyMappings && super.isPureMappingsAddition) {
log.info("Index: {} - New fields have been added to index. Adding: {}",
super.name, mappingsDiff.entriesOnlyOnRight());
@ -171,8 +172,21 @@ public class ReindexConfig {
return super.build();
}
private static TreeMap<String, Object> getOrDefault(Map<String, Object> map, List<String> path) {
if (map == null) {
return new TreeMap<>();
}
TreeMap<String, Object> item = (TreeMap<String, Object>) map.getOrDefault(path.get(0), new TreeMap());
if (path.size() == 1) {
return item;
} else {
return getOrDefault(item, path.subList(1, path.size()));
}
}
private boolean isAnalysisEqual() {
if (!super.targetSettings.containsKey("index")) {
if (super.targetSettings == null || !super.targetSettings.containsKey("index")) {
return true;
}
Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index");
@ -186,7 +200,7 @@ public class ReindexConfig {
}
private boolean isSettingsEqual() {
if (!super.targetSettings.containsKey("index")) {
if (super.targetSettings == null || !super.targetSettings.containsKey("index")) {
return true;
}
Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index");
@ -196,7 +210,7 @@ public class ReindexConfig {
}
private boolean isSettingsReindexRequired() {
if (!super.targetSettings.containsKey("index")) {
if (super.targetSettings == null || !super.targetSettings.containsKey("index")) {
return false;
}
Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index");

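A small illustration of the new null-safe getOrDefault helper used above; the maps passed in are hypothetical:

    // Walks the path one key at a time, substituting an empty TreeMap whenever a level is missing,
    // so Maps.difference never has to deal with a null map or an absent "properties" block.
    TreeMap<String, Object> current = getOrDefault(currentMappings, List.of("properties"));
    TreeMap<String, Object> empty = getOrDefault(null, List.of("properties")); // returns an empty TreeMap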
View File

@ -7,6 +7,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
import com.linkedin.common.urn.Urn;
import com.linkedin.data.schema.DataSchema;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.metadata.models.AspectSpec;
import com.linkedin.metadata.models.EntitySpec;
import com.linkedin.metadata.models.SearchScoreFieldSpec;
@ -21,6 +22,7 @@ import java.util.Optional;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import javax.annotation.Nonnull;
@ -30,6 +32,7 @@ import javax.annotation.Nonnull;
* Class that provides a utility function that transforms the snapshot object into a search document
*/
@Slf4j
@Setter
@RequiredArgsConstructor
public class SearchDocumentTransformer {
@ -42,6 +45,8 @@ public class SearchDocumentTransformer {
// Maximum customProperties value length
private final int maxValueLength;
private SystemEntityClient entityClient;
private static final String BROWSE_PATH_V2_DELIMITER = "";
public Optional<String> transformSnapshot(final RecordTemplate snapshot, final EntitySpec entitySpec,
@ -72,14 +77,18 @@ public class SearchDocumentTransformer {
FieldExtractor.extractFields(aspect, aspectSpec.getSearchableFieldSpecs(), maxValueLength);
final Map<SearchScoreFieldSpec, List<Object>> extractedSearchScoreFields =
FieldExtractor.extractFields(aspect, aspectSpec.getSearchScoreFieldSpecs(), maxValueLength);
if (extractedSearchableFields.isEmpty() && extractedSearchScoreFields.isEmpty()) {
return Optional.empty();
Optional<String> result = Optional.empty();
if (!extractedSearchableFields.isEmpty() || !extractedSearchScoreFields.isEmpty()) {
final ObjectNode searchDocument = JsonNodeFactory.instance.objectNode();
searchDocument.put("urn", urn.toString());
extractedSearchableFields.forEach((key, values) -> setSearchableValue(key, values, searchDocument, forDelete));
extractedSearchScoreFields.forEach((key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete));
result = Optional.of(searchDocument.toString());
}
final ObjectNode searchDocument = JsonNodeFactory.instance.objectNode();
searchDocument.put("urn", urn.toString());
extractedSearchableFields.forEach((key, values) -> setSearchableValue(key, values, searchDocument, forDelete));
extractedSearchScoreFields.forEach((key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete));
return Optional.of(searchDocument.toString());
return result;
}
public void setSearchableValue(final SearchableFieldSpec fieldSpec, final List<Object> fieldValues,

View File

@ -12,6 +12,7 @@ import com.linkedin.common.urn.UrnUtils;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.dataset.FineGrainedLineage;
import com.linkedin.dataset.UpstreamLineage;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.events.metadata.ChangeType;
import com.linkedin.metadata.Constants;
import com.linkedin.metadata.graph.Edge;
@ -28,6 +29,7 @@ import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.query.filter.RelationshipDirection;
import com.linkedin.metadata.search.EntitySearchService;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders;
import com.linkedin.metadata.search.transformer.SearchDocumentTransformer;
import com.linkedin.metadata.search.utils.SearchUtils;
import com.linkedin.metadata.systemmetadata.SystemMetadataService;
@ -39,6 +41,8 @@ import com.linkedin.mxe.GenericAspect;
import com.linkedin.mxe.MetadataChangeLog;
import com.linkedin.mxe.SystemMetadata;
import com.linkedin.util.Pair;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
@ -68,6 +72,7 @@ public class UpdateIndicesService {
private final SystemMetadataService _systemMetadataService;
private final EntityRegistry _entityRegistry;
private final SearchDocumentTransformer _searchDocumentTransformer;
private final EntityIndexBuilders _entityIndexBuilders;
@Value("${featureFlags.graphServiceDiffModeEnabled:true}")
private boolean _graphDiffMode;
@ -90,25 +95,31 @@ public class UpdateIndicesService {
}
public UpdateIndicesService(
GraphService graphService,
EntitySearchService entitySearchService,
TimeseriesAspectService timeseriesAspectService,
SystemMetadataService systemMetadataService,
EntityRegistry entityRegistry,
SearchDocumentTransformer searchDocumentTransformer) {
GraphService graphService,
EntitySearchService entitySearchService,
TimeseriesAspectService timeseriesAspectService,
SystemMetadataService systemMetadataService,
EntityRegistry entityRegistry,
SearchDocumentTransformer searchDocumentTransformer,
EntityIndexBuilders entityIndexBuilders) {
_graphService = graphService;
_entitySearchService = entitySearchService;
_timeseriesAspectService = timeseriesAspectService;
_systemMetadataService = systemMetadataService;
_entityRegistry = entityRegistry;
_searchDocumentTransformer = searchDocumentTransformer;
_entityIndexBuilders = entityIndexBuilders;
}
public void handleChangeEvent(@Nonnull final MetadataChangeLog event) {
if (UPDATE_CHANGE_TYPES.contains(event.getChangeType())) {
handleUpdateChangeEvent(event);
} else if (event.getChangeType() == ChangeType.DELETE) {
handleDeleteChangeEvent(event);
try {
if (UPDATE_CHANGE_TYPES.contains(event.getChangeType())) {
handleUpdateChangeEvent(event);
} else if (event.getChangeType() == ChangeType.DELETE) {
handleDeleteChangeEvent(event);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@ -123,7 +134,7 @@ public class UpdateIndicesService {
*
* @param event the change event to be processed.
*/
public void handleUpdateChangeEvent(@Nonnull final MetadataChangeLog event) {
public void handleUpdateChangeEvent(@Nonnull final MetadataChangeLog event) throws IOException {
final EntitySpec entitySpec = getEventEntitySpec(event);
final Urn urn = EntityKeyUtils.getUrnFromLog(event, entitySpec.getKeyAspectSpec());
@ -212,7 +223,7 @@ public class UpdateIndicesService {
if (!aspectSpec.isTimeseries()) {
deleteSystemMetadata(urn, aspectSpec, isDeletingKey);
deleteGraphData(urn, aspectSpec, aspect, isDeletingKey, event);
deleteSearchData(urn, entitySpec.getName(), aspectSpec, aspect, isDeletingKey);
deleteSearchData(_entitySearchService, urn, entitySpec.getName(), aspectSpec, aspect, isDeletingKey);
}
}
@ -405,7 +416,8 @@ public class UpdateIndicesService {
/**
* Process snapshot and update search index
*/
private void updateSearchService(String entityName, Urn urn, AspectSpec aspectSpec, RecordTemplate aspect,
private void updateSearchService(String entityName, Urn urn,
AspectSpec aspectSpec, RecordTemplate aspect,
@Nullable SystemMetadata systemMetadata, @Nullable RecordTemplate previousAspect) {
Optional<String> searchDocument;
Optional<String> previousSearchDocument = Optional.empty();
@ -513,7 +525,8 @@ public class UpdateIndicesService {
}
}
private void deleteSearchData(Urn urn, String entityName, AspectSpec aspectSpec, RecordTemplate aspect, Boolean isKeyAspect) {
private void deleteSearchData(EntitySearchService entitySearchService, Urn urn, String entityName,
AspectSpec aspectSpec, RecordTemplate aspect, Boolean isKeyAspect) {
String docId;
try {
docId = URLEncoder.encode(urn.toString(), "UTF-8");
@ -551,4 +564,13 @@ public class UpdateIndicesService {
event.getEntityType()));
}
}
/**
* Allow internal use of the system entity client. Solves recursive dependencies between the UpdateIndicesService
* and the SystemJavaEntityClient
* @param systemEntityClient system entity client
*/
public void setSystemEntityClient(SystemEntityClient systemEntityClient) {
_searchDocumentTransformer.setEntityClient(systemEntityClient);
}
}
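A rough sketch of the late binding this setter enables (see the JavaEntityClientFactory and UpdateIndicesServiceFactory changes further down); the bean wiring here is simplified and illustrative:

    // 1. Construct UpdateIndicesService without a client, avoiding a constructor-level cycle
    //    between the entity service/client and the indexing path.
    UpdateIndicesService updateIndicesService = new UpdateIndicesService(graphService, entitySearchService,
        timeseriesAspectService, systemMetadataService, entityRegistry, searchDocumentTransformer, entityIndexBuilders);
    // 2. Once a SystemEntityClient exists, hand it back; internally this simply forwards to
    //    SearchDocumentTransformer#setEntityClient.
    updateIndicesService.setSystemEntityClient(systemEntityClient);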

View File

@ -11,7 +11,7 @@ public interface ElasticSearchIndexed {
* The index configurations for the given service.
* @return List of reindex configurations
*/
List<ReindexConfig> getReindexConfigs() throws IOException;
List<ReindexConfig> buildReindexConfigs() throws IOException;
/**
* Mirrors the service's functions which

View File

@ -205,7 +205,7 @@ public class ElasticSearchSystemMetadataService implements SystemMetadataService
public void configure() {
log.info("Setting up system metadata index");
try {
for (ReindexConfig config : getReindexConfigs()) {
for (ReindexConfig config : buildReindexConfigs()) {
_indexBuilder.buildIndex(config);
}
} catch (IOException ie) {
@ -214,7 +214,7 @@ public class ElasticSearchSystemMetadataService implements SystemMetadataService
}
@Override
public List<ReindexConfig> getReindexConfigs() throws IOException {
public List<ReindexConfig> buildReindexConfigs() throws IOException {
return List.of(_indexBuilder.buildReindexState(_indexConvention.getIndexName(INDEX_NAME),
SystemMetadataMappingsBuilder.getMappings(), Collections.emptyMap()));
}

View File

@ -137,9 +137,10 @@ public class ElasticSearchTimeseriesAspectService implements TimeseriesAspectService
}
@Override
public List<ReindexConfig> getReindexConfigs() {
return _indexBuilders.getReindexConfigs();
public List<ReindexConfig> buildReindexConfigs() {
return _indexBuilders.buildReindexConfigs();
}
public String reindexAsync(String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options)
throws Exception {
return _indexBuilders.reindexAsync(index, filterQuery, options);

View File

@ -29,7 +29,7 @@ public class TimeseriesAspectIndexBuilders implements ElasticSearchIndexed {
@Override
public void reindexAll() {
for (ReindexConfig config : getReindexConfigs()) {
for (ReindexConfig config : buildReindexConfigs()) {
try {
_indexBuilder.buildIndex(config);
} catch (IOException e) {
@ -63,7 +63,7 @@ public class TimeseriesAspectIndexBuilders implements ElasticSearchIndexed {
}
@Override
public List<ReindexConfig> getReindexConfigs() {
public List<ReindexConfig> buildReindexConfigs() {
return _entityRegistry.getEntitySpecs().values().stream()
.flatMap(entitySpec -> entitySpec.getAspectSpecs().stream()
.map(aspectSpec -> Pair.of(entitySpec, aspectSpec)))
@ -80,4 +80,5 @@ public class TimeseriesAspectIndexBuilders implements ElasticSearchIndexed {
}
}).collect(Collectors.toList());
}
}

View File

@ -1,18 +1,27 @@
package com.linkedin.metadata.entity;
import com.linkedin.common.urn.Urn;
import com.linkedin.metadata.AspectIngestionUtils;
import com.linkedin.metadata.config.PreProcessHooks;
import com.linkedin.metadata.EbeanTestUtils;
import com.linkedin.metadata.entity.ebean.EbeanAspectDao;
import com.linkedin.metadata.entity.ebean.EbeanRetentionService;
import com.linkedin.metadata.event.EventProducer;
import com.linkedin.metadata.key.CorpUserKey;
import com.linkedin.metadata.models.registry.EntityRegistryException;
import com.linkedin.metadata.service.UpdateIndicesService;
import io.ebean.Database;
import org.testng.Assert;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static com.linkedin.metadata.Constants.*;
import static org.mockito.Mockito.*;
import static org.testng.Assert.*;
public class EbeanAspectMigrationsDaoTest extends AspectMigrationsDaoTest<EbeanAspectDao> {
@ -37,13 +46,19 @@ public class EbeanAspectMigrationsDaoTest extends AspectMigrationsDaoTest<EbeanAspectDao>
_migrationsDao = dao;
}
/**
* Ideally, all tests would be in the base class, so they're reused between all implementations.
* When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test
* to make sure this class will always be discovered.
*/
@Test
public void obligatoryTest() throws AssertionError {
Assert.assertTrue(true);
public void testStreamAspects() throws AssertionError {
final int totalAspects = 30;
Map<Urn, CorpUserKey> ingestedAspects =
AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalAspects);
List<String> ingestedUrns = ingestedAspects.keySet().stream().map(Urn::toString).collect(Collectors.toList());
Stream<EntityAspect> aspectStream = _migrationsDao.streamAspects(CORP_USER_ENTITY_NAME, CORP_USER_KEY_ASPECT_NAME);
List<EntityAspect> aspectList = aspectStream.collect(Collectors.toList());
assertEquals(ingestedUrns.size(), aspectList.size());
Set<String> urnsFetched = aspectList.stream().map(EntityAspect::getUrn).collect(Collectors.toSet());
for (String urn : ingestedUrns) {
assertTrue(urnsFetched.contains(urn));
}
}
}

View File

@ -12,11 +12,16 @@ import com.linkedin.data.template.RecordTemplate;
import com.linkedin.events.metadata.ChangeType;
import com.linkedin.glossary.GlossaryTermInfo;
import com.linkedin.metadata.Constants;
import com.linkedin.metadata.config.PreProcessHooks;
import com.linkedin.metadata.entity.AspectDao;
import com.linkedin.metadata.entity.AspectUtils;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.entity.EntityServiceImpl;
import com.linkedin.metadata.event.EventProducer;
import com.linkedin.metadata.models.AspectSpec;
import com.linkedin.metadata.models.EntitySpec;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.service.UpdateIndicesService;
import com.linkedin.metadata.utils.EntityKeyUtils;
import com.linkedin.metadata.utils.GenericRecordUtils;
import net.datafaker.Faker;
@ -42,6 +47,8 @@ import java.util.stream.IntStream;
import java.util.stream.LongStream;
import java.util.stream.Stream;
import static org.mockito.Mockito.mock;
public class DataGenerator {
private final static Faker FAKER = new Faker();
private final EntityRegistry entityRegistry;
@ -52,10 +59,21 @@ public class DataGenerator {
this.entityRegistry = entityService.getEntityRegistry();
}
public static DataGenerator build(EntityRegistry entityRegistry) {
EntityServiceImpl mockEntityServiceImpl = new EntityServiceImpl(mock(AspectDao.class),
mock(EventProducer.class), entityRegistry, false,
mock(UpdateIndicesService.class), mock(PreProcessHooks.class));
return new DataGenerator(mockEntityServiceImpl);
}
public Stream<List<MetadataChangeProposal>> generateDatasets() {
return generateMCPs("dataset", 10, List.of());
}
public List<MetadataChangeProposal> generateTags(long count) {
return generateMCPs("tag", count, List.of()).findFirst().get();
}
public Stream<List<MetadataChangeProposal>> generateMCPs(String entityName, long count, List<String> aspects) {
EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName);
@ -127,9 +145,7 @@ public class DataGenerator {
public Map<String, BiFunction<RecordTemplate, Integer, List<MetadataChangeProposal>>> nestedRandomAspectGenerators = Map.of(
"globalTags", (aspect, count) -> {
try {
List<MetadataChangeProposal> tags = generateMCPs("tag", count, List.of())
.map(mcps -> mcps.get(0))
.collect(Collectors.toList());
List<MetadataChangeProposal> tags = generateTags(count);
Method setTagsMethod = aspect.getClass().getMethod("setTags", TagAssociationArray.class);
TagAssociationArray tagAssociations = new TagAssociationArray();
tagAssociations.addAll(tags.stream().map(

View File

@ -3,4 +3,4 @@ management.endpoints.web.exposure.include=metrics, health, info
spring.mvc.servlet.path=/
management.health.elasticsearch.enabled=false
management.health.neo4j.enabled=false
entityClient.preferredImpl=restli

View File

@ -7,6 +7,7 @@ import com.linkedin.metadata.entity.EntityServiceImpl;
import com.linkedin.metadata.graph.GraphService;
import com.linkedin.metadata.models.registry.ConfigEntityRegistry;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders;
import com.linkedin.metadata.systemmetadata.ElasticSearchSystemMetadataService;
import io.ebean.Database;
import org.springframework.boot.test.context.TestConfiguration;
@ -40,4 +41,7 @@ public class MaeConsumerApplicationTestConfiguration {
@MockBean
private ConfigEntityRegistry _configEntityRegistry;
@MockBean
public EntityIndexBuilders entityIndexBuilders;
}

View File

@ -14,6 +14,8 @@ import com.linkedin.metadata.kafka.hook.siblings.SiblingAssociationHook;
import com.linkedin.metadata.utils.metrics.MetricUtils;
import com.linkedin.mxe.MetadataChangeLog;
import com.linkedin.mxe.Topics;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import lombok.Getter;
@ -47,7 +49,10 @@ public class MetadataChangeLogProcessor {
@Autowired
public MetadataChangeLogProcessor(List<MetadataChangeLogHook> metadataChangeLogHooks) {
this.hooks = metadataChangeLogHooks.stream().filter(MetadataChangeLogHook::isEnabled).collect(Collectors.toList());
this.hooks = metadataChangeLogHooks.stream()
.filter(MetadataChangeLogHook::isEnabled)
.sorted(Comparator.comparing(MetadataChangeLogHook::executionOrder))
.collect(Collectors.toList());
this.hooks.forEach(MetadataChangeLogHook::init);
}

View File

@ -29,4 +29,12 @@ public interface MetadataChangeLogHook {
* Invoke the hook when a MetadataChangeLog is received
*/
void invoke(@Nonnull MetadataChangeLog log) throws Exception;
/**
* Controls hook execution ordering
* @return order to execute
*/
default int executionOrder() {
return 100;
}
}
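A minimal sketch of a hook opting into a custom position, assuming the interface's other members keep their defaults; the hook class itself is hypothetical:

    public class ExampleOrderedHook implements MetadataChangeLogHook {
      @Override
      public void invoke(@Nonnull MetadataChangeLog log) throws Exception {
        // hook logic
      }

      @Override
      public int executionOrder() {
        return 50; // MetadataChangeLogProcessor sorts ascending, so this runs before the default of 100
      }
    }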

View File

@ -24,7 +24,7 @@ import static com.linkedin.metadata.Constants.*;
EntityRegistryFactory.class, SystemMetadataServiceFactory.class, SearchDocumentTransformerFactory.class})
public class UpdateIndicesHook implements MetadataChangeLogHook {
private final UpdateIndicesService _updateIndicesService;
protected final UpdateIndicesService _updateIndicesService;
private final boolean _isEnabled;
public UpdateIndicesHook(

View File

@ -34,6 +34,7 @@ import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.query.filter.RelationshipDirection;
import com.linkedin.metadata.search.EntitySearchService;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders;
import com.linkedin.metadata.search.transformer.SearchDocumentTransformer;
import com.linkedin.metadata.service.UpdateIndicesService;
import com.linkedin.metadata.systemmetadata.SystemMetadataService;
@ -42,10 +43,12 @@ import com.linkedin.metadata.utils.GenericRecordUtils;
import com.linkedin.mxe.MetadataChangeLog;
import com.linkedin.mxe.SystemMetadata;
import com.linkedin.schema.SchemaField;
import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Value;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@ -82,9 +85,13 @@ public class UpdateIndicesHookTest {
private SearchDocumentTransformer _searchDocumentTransformer;
private DataHubUpgradeKafkaListener _mockDataHubUpgradeKafkaListener;
private ConfigurationProvider _mockConfigurationProvider;
private EntityIndexBuilders _mockEntityIndexBuilders;
private Urn _actorUrn;
private UpdateIndicesService _updateIndicesService;
@Value("${elasticsearch.index.maxArrayLength}")
private int maxArrayLength;
@BeforeMethod
public void setupTest() {
_actorUrn = UrnUtils.getUrn(TEST_ACTOR_URN);
@ -95,6 +102,8 @@ public class UpdateIndicesHookTest {
_searchDocumentTransformer = new SearchDocumentTransformer(1000, 1000, 1000);
_mockDataHubUpgradeKafkaListener = Mockito.mock(DataHubUpgradeKafkaListener.class);
_mockConfigurationProvider = Mockito.mock(ConfigurationProvider.class);
_mockEntityIndexBuilders = Mockito.mock(EntityIndexBuilders.class);
ElasticSearchConfiguration elasticSearchConfiguration = new ElasticSearchConfiguration();
SystemUpdateConfiguration systemUpdateConfiguration = new SystemUpdateConfiguration();
systemUpdateConfiguration.setWaitForSystemUpdate(false);
@ -105,7 +114,8 @@ public class UpdateIndicesHookTest {
_mockTimeseriesAspectService,
_mockSystemMetadataService,
ENTITY_REGISTRY,
_searchDocumentTransformer
_searchDocumentTransformer,
_mockEntityIndexBuilders
);
_updateIndicesHook = new UpdateIndicesHook(
_updateIndicesService,
@ -163,7 +173,8 @@ public class UpdateIndicesHookTest {
_mockTimeseriesAspectService,
_mockSystemMetadataService,
mockEntityRegistry,
_searchDocumentTransformer
_searchDocumentTransformer,
_mockEntityIndexBuilders
);
_updateIndicesHook = new UpdateIndicesHook(_updateIndicesService, true);

View File

@ -9,6 +9,7 @@ import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.registry.SchemaRegistryService;
import com.linkedin.metadata.search.elasticsearch.ElasticSearchService;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders;
import com.linkedin.metadata.search.transformer.SearchDocumentTransformer;
import com.linkedin.metadata.systemmetadata.SystemMetadataService;
import com.linkedin.metadata.timeseries.TimeseriesAspectService;
@ -64,4 +65,7 @@ public class MCLSpringTestConfiguration {
@MockBean
public SchemaRegistryService schemaRegistryService;
@MockBean
public EntityIndexBuilders entityIndexBuilders;
}

View File

@ -8,6 +8,7 @@ import com.linkedin.metadata.graph.SiblingGraphService;
import com.linkedin.metadata.models.registry.ConfigEntityRegistry;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.restli.DefaultRestliClientFactory;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders;
import com.linkedin.metadata.timeseries.TimeseriesAspectService;
import com.linkedin.parseq.retry.backoff.ExponentialBackoff;
import com.linkedin.restli.client.Client;
@ -57,4 +58,7 @@ public class MceConsumerApplicationTestConfiguration {
@MockBean
protected SiblingGraphService siblingGraphService;
@MockBean
public EntityIndexBuilders entityIndexBuilders;
}

View File

@ -339,7 +339,7 @@ cache:
statsEnabled: ${CACHE_CLIENT_ENTITY_CLIENT_STATS_ENABLED:true}
statsIntervalSeconds: ${CACHE_CLIENT_ENTITY_CLIENT_STATS_INTERVAL_SECONDS:120}
defaultTTLSeconds: ${CACHE_CLIENT_ENTITY_CLIENT_TTL_SECONDS:0} # do not cache entity/aspects by default
maxBytes: ${CACHE_CLIENT_USAGE_ENTITY_MAX_BYTES:104857600} # 100MB
maxBytes: ${CACHE_CLIENT_ENTITY_CLIENT_MAX_BYTES:104857600} # 100MB
entityAspectTTLSeconds:
# cache user aspects for 20s
corpuser:

View File

@ -33,17 +33,19 @@ public class EntityServiceFactory {
TopicConventionFactory.TOPIC_CONVENTION_BEAN, "entityRegistry"})
@Nonnull
protected EntityService createInstance(
Producer<String, ? extends IndexedRecord> producer,
TopicConvention convention,
KafkaHealthChecker kafkaHealthChecker,
@Qualifier("entityAspectDao") AspectDao aspectDao,
EntityRegistry entityRegistry,
ConfigurationProvider configurationProvider,
UpdateIndicesService updateIndicesService) {
Producer<String, ? extends IndexedRecord> producer,
TopicConvention convention,
KafkaHealthChecker kafkaHealthChecker,
@Qualifier("entityAspectDao") AspectDao aspectDao,
EntityRegistry entityRegistry,
ConfigurationProvider configurationProvider,
UpdateIndicesService updateIndicesService) {
final KafkaEventProducer eventProducer = new KafkaEventProducer(producer, convention, kafkaHealthChecker);
FeatureFlags featureFlags = configurationProvider.getFeatureFlags();
return new EntityServiceImpl(aspectDao, eventProducer, entityRegistry,
EntityService entityService = new EntityServiceImpl(aspectDao, eventProducer, entityRegistry,
featureFlags.isAlwaysEmitChangeLog(), updateIndicesService, featureFlags.getPreProcessHooks(), _ebeanMaxTransactionRetry);
return entityService;
}
}

View File

@ -16,14 +16,17 @@ import com.linkedin.metadata.search.client.CachingEntitySearchService;
import com.linkedin.metadata.timeseries.TimeseriesAspectService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@Configuration
@ConditionalOnExpression("'${entityClient.preferredImpl:java}'.equals('java')")
@Import({DataHubKafkaProducerFactory.class})
public class JavaEntityClientFactory {
@Autowired
@Qualifier("entityService")
private EntityService _entityService;
@ -74,7 +77,7 @@ public class JavaEntityClientFactory {
public SystemJavaEntityClient systemJavaEntityClient(@Qualifier("configurationProvider") final ConfigurationProvider configurationProvider,
@Qualifier("systemAuthentication") final Authentication systemAuthentication,
@Qualifier("systemRestliEntityClient") final RestliEntityClient restliEntityClient) {
return new SystemJavaEntityClient(
SystemJavaEntityClient systemJavaEntityClient = new SystemJavaEntityClient(
_entityService,
_deleteEntityService,
_entitySearchService,
@ -86,5 +89,9 @@ public class JavaEntityClientFactory {
restliEntityClient,
systemAuthentication,
configurationProvider.getCache().getClient().getEntityClient());
_entityService.setSystemEntityClient(systemJavaEntityClient);
return systemJavaEntityClient;
}
}

View File

@ -1,24 +1,44 @@
package com.linkedin.gms.factory.entity.update.indices;
import com.linkedin.entity.client.SystemRestliEntityClient;
import com.linkedin.gms.factory.search.EntityIndexBuildersFactory;
import com.linkedin.metadata.graph.GraphService;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.search.EntitySearchService;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders;
import com.linkedin.metadata.search.transformer.SearchDocumentTransformer;
import com.linkedin.metadata.service.UpdateIndicesService;
import com.linkedin.metadata.systemmetadata.SystemMetadataService;
import com.linkedin.metadata.timeseries.TimeseriesAspectService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@Configuration
@Import(EntityIndexBuildersFactory.class)
public class UpdateIndicesServiceFactory {
@Autowired
private ApplicationContext context;
@Value("${entityClient.preferredImpl:java}")
private String entityClientImpl;
@Bean
public UpdateIndicesService updateIndicesService(GraphService graphService, EntitySearchService entitySearchService,
TimeseriesAspectService timeseriesAspectService, SystemMetadataService systemMetadataService,
EntityRegistry entityRegistry, SearchDocumentTransformer searchDocumentTransformer) {
return new UpdateIndicesService(graphService, entitySearchService, timeseriesAspectService,
systemMetadataService, entityRegistry, searchDocumentTransformer);
TimeseriesAspectService timeseriesAspectService,
SystemMetadataService systemMetadataService,
EntityRegistry entityRegistry, SearchDocumentTransformer searchDocumentTransformer,
EntityIndexBuilders entityIndexBuilders) {
UpdateIndicesService updateIndicesService = new UpdateIndicesService(graphService, entitySearchService, timeseriesAspectService,
systemMetadataService, entityRegistry, searchDocumentTransformer, entityIndexBuilders);
if ("restli".equals(entityClientImpl)) {
updateIndicesService.setSystemEntityClient(context.getBean(SystemRestliEntityClient.class));
}
return updateIndicesService;
}
}

View File

@ -47,6 +47,9 @@ public class ElasticSearchServiceFactory {
@Qualifier("settingsBuilder")
private SettingsBuilder settingsBuilder;
@Autowired
private EntityIndexBuilders entityIndexBuilders;
@Autowired
private ConfigurationProvider configurationProvider;
@ -64,9 +67,7 @@ public class ElasticSearchServiceFactory {
new ESSearchDAO(entityRegistry, components.getSearchClient(), components.getIndexConvention(),
configurationProvider.getFeatureFlags().isPointInTimeCreationEnabled(),
elasticSearchConfiguration.getImplementation(), searchConfiguration, customSearchConfiguration);
return new ElasticSearchService(
new EntityIndexBuilders(components.getIndexBuilder(), entityRegistry, components.getIndexConvention(),
settingsBuilder), esSearchDAO,
return new ElasticSearchService(entityIndexBuilders, esSearchDAO,
new ESBrowseDAO(entityRegistry, components.getSearchClient(), components.getIndexConvention(),
searchConfiguration, customSearchConfiguration),
new ESWriteDAO(entityRegistry, components.getSearchClient(), components.getIndexConvention(),

View File

@ -0,0 +1,35 @@
package com.linkedin.gms.factory.search;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class EntityIndexBuildersFactory {
@Autowired
@Qualifier("baseElasticSearchComponents")
private BaseElasticSearchComponentsFactory.BaseElasticSearchComponents components;
@Autowired
@Qualifier("entityRegistry")
private EntityRegistry entityRegistry;
@Autowired
@Qualifier("settingsBuilder")
private SettingsBuilder settingsBuilder;
@Bean
protected EntityIndexBuilders entityIndexBuilders() {
return new EntityIndexBuilders(components.getIndexBuilder(), entityRegistry, components.getIndexConvention(), settingsBuilder);
}
}

View File

@ -21,7 +21,6 @@ import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName;
@ -44,8 +43,7 @@ public class EntityClientCache {
if (config.isEnabled()) {
Set<Key> keys = urns.stream()
.flatMap(urn -> aspectNames.stream()
.map(a -> Key.builder().urn(urn).aspectName(a).build()))
.flatMap(urn -> aspectNames.stream().map(a -> Key.builder().urn(urn).aspectName(a).build()))
.collect(Collectors.toSet());
Map<Key, EnvelopedAspect> envelopedAspects = cache.getAll(keys);
@ -92,13 +90,13 @@ public class EntityClientCache {
Map<String, Set<Key>> keysByEntity = StreamSupport.stream(keys.spliterator(), true)
.collect(Collectors.groupingBy(Key::getEntityName, Collectors.toSet()));
Stream<Map.Entry<Key, EnvelopedAspect>> results = keysByEntity.entrySet().parallelStream()
Map<Key, EnvelopedAspect> results = keysByEntity.entrySet().parallelStream()
.flatMap(entry -> {
Set<Urn> urns = entry.getValue().stream()
.map(Key::getUrn)
.collect(Collectors.toSet());
Set<String> aspects = entry.getValue().stream()
.map(Key::getEntityName)
.map(Key::getAspectName)
.collect(Collectors.toSet());
return loadFunction.apply(urns, aspects).entrySet().stream();
})
@ -106,9 +104,9 @@ public class EntityClientCache {
.map(envAspect -> {
Key key = Key.builder().urn(resp.getKey()).aspectName(envAspect.getName()).build();
return Map.entry(key, envAspect);
}));
})).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
return results.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
return results;
};
// ideally the cache time comes from caching headers from service, but configuration driven for now

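For context on the cache key fix above: when batched cache misses are grouped by entity, the set handed to the load function must be the distinct aspect names for that group, not the entity name repeated once per key. A hedged sketch with illustrative aspect names:

    // The old code mapped Key::getEntityName here, producing e.g. {"corpuser"} instead of
    // {"corpUserInfo", "corpUserKey"}, so the backend was asked for the wrong aspects on a miss.
    Set<String> aspects = entry.getValue().stream()
        .map(Key::getAspectName)
        .collect(Collectors.toSet());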
View File

@ -9,6 +9,7 @@ import com.linkedin.data.template.RecordTemplate;
import com.linkedin.entity.Entity;
import com.linkedin.entity.EntityResponse;
import com.linkedin.entity.EnvelopedAspect;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.events.metadata.ChangeType;
import com.linkedin.metadata.aspect.VersionedAspect;
import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs;
@ -297,4 +298,11 @@ public interface EntityService {
*/
@Nonnull
BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) throws URISyntaxException;
/**
* Allow internal use of the system entity client. Solves recursive dependencies between the EntityService
* and the SystemJavaEntityClient
* @param systemEntityClient system entity client
*/
void setSystemEntityClient(SystemEntityClient systemEntityClient);
}