fix(spring): refactor spring configuration (#10290)

Author: david-leifker (committed by GitHub)
Date: 2024-04-16 13:50:41 -05:00
Parent: 3ac8778fe4
Commit: f36a597b17
138 changed files with 598 additions and 626 deletions

View File

@ -2,12 +2,14 @@ buildscript {
ext.jdkVersionDefault = 17
ext.javaClassVersionDefault = 11
def springModules = ['mae-consumer', 'mce-consumer', 'pe-consumer']
ext.jdkVersion = { p ->
// If Spring 6 is present, hard dependency on jdk17
if (p.configurations.any { it.getDependencies().any{
(it.getGroup().equals("org.springframework") && it.getVersion().startsWith("6."))
|| (it.getGroup().equals("org.springframework.boot") && it.getVersion().startsWith("3.") && !it.getName().equals("spring-boot-starter-test"))
}}) {
}} || springModules.contains(p.name)) {
return 17
} else {
// otherwise we can use the preferred default which can be overridden with a property: -PjdkVersionDefault
@ -20,7 +22,7 @@ buildscript {
if (p.configurations.any { it.getDependencies().any {
(it.getGroup().equals("org.springframework") && it.getVersion().startsWith("6."))
|| (it.getGroup().equals("org.springframework.boot") && it.getVersion().startsWith("3.") && !it.getName().equals("spring-boot-starter-test"))
}}) {
}} || springModules.contains(p.name)) {
return 17
} else {
// otherwise we can use the preferred default which can be overridden with a property: -PjavaClassVersionDefault
@ -158,6 +160,7 @@ project.ext.externalDependency = [
'javatuples': 'org.javatuples:javatuples:1.2',
'javaxInject' : 'javax.inject:javax.inject:1',
'javaxValidation' : 'javax.validation:validation-api:2.0.1.Final',
'jakartaValidation': 'jakarta.validation:jakarta.validation-api:3.1.0-M2',
'jerseyCore': 'org.glassfish.jersey.core:jersey-client:2.41',
'jerseyGuava': 'org.glassfish.jersey.bundles.repackaged:jersey-guava:2.25.1',
'jettyJaas': "org.eclipse.jetty:jetty-jaas:$jettyVersion",
@ -266,6 +269,7 @@ project.ext.externalDependency = [
'jline':'jline:jline:1.4.1',
'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0',
'annotationApi': 'javax.annotation:javax.annotation-api:1.3.2',
'jakartaAnnotationApi': 'jakarta.annotation:jakarta.annotation-api:3.0.0',
'classGraph': 'io.github.classgraph:classgraph:4.8.168',
]

View File

@ -6,16 +6,19 @@ import com.linkedin.metadata.config.cache.CacheConfiguration;
import com.linkedin.metadata.config.kafka.KafkaConfiguration;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import lombok.Data;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.stereotype.Component;
/**
* Minimal sharing between metadata-service and frontend. Does not use the factories module to avoid
* transitive dependencies.
*/
@EnableConfigurationProperties
@PropertySource(value = "application.yml", factory = YamlPropertySourceFactory.class)
@PropertySource(value = "classpath:/application.yaml", factory = YamlPropertySourceFactory.class)
@ConfigurationProperties
@Data
public class ConfigurationProvider {

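For context on the hunk above: with `@ConfigurationProperties` and the `classpath:/application.yaml` property source, Spring binds top-level YAML blocks directly onto the provider's fields through the Lombok-generated setters. A condensed sketch of that relationship, assuming a `kafka` field of the imported `KafkaConfiguration` type (the field list is illustrative, not the full class):

```java
import com.linkedin.metadata.config.kafka.KafkaConfiguration;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;

// Sketch only: mirrors the annotations in the hunk above with an illustrative field.
@Configuration
@EnableConfigurationProperties
@PropertySource(value = "classpath:/application.yaml", factory = YamlPropertySourceFactory.class)
@ConfigurationProperties
@Data
public class ConfigurationProviderSketch {
  // A top-level `kafka:` block in application.yaml populates this field
  // via the setter generated by Lombok's @Data.
  private KafkaConfiguration kafka;
}
```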
View File

@ -5,7 +5,6 @@ import com.linkedin.gms.factory.auth.DataHubAuthorizerFactory;
import com.linkedin.gms.factory.graphql.GraphQLEngineFactory;
import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory;
import com.linkedin.gms.factory.kafka.SimpleKafkaConsumerFactory;
import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory;
import com.linkedin.gms.factory.telemetry.ScheduledAnalyticsFactory;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@ -31,7 +30,6 @@ import org.springframework.context.annotation.FilterType;
DataHubAuthorizerFactory.class,
SimpleKafkaConsumerFactory.class,
KafkaEventConsumerFactory.class,
InternalSchemaRegistryFactory.class,
GraphQLEngineFactory.class
})
})

View File

@ -6,9 +6,9 @@ import com.linkedin.datahub.upgrade.system.SystemUpdate;
import com.linkedin.datahub.upgrade.system.SystemUpdateBlocking;
import com.linkedin.datahub.upgrade.system.SystemUpdateNonBlocking;
import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep;
import com.linkedin.gms.factory.common.TopicConventionFactory;
import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory;
import com.linkedin.gms.factory.kafka.common.TopicConventionFactory;
import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory;
import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig;
import com.linkedin.metadata.config.kafka.KafkaConfiguration;
@ -110,4 +110,14 @@ public class SystemUpdateConfig {
@Qualifier("duheKafkaEventProducer") KafkaEventProducer kafkaEventProducer) {
return kafkaEventProducer;
}
@Primary
@Bean(name = "schemaRegistryConfig")
@ConditionalOnProperty(
name = "kafka.schemaRegistry.type",
havingValue = InternalSchemaRegistryFactory.TYPE)
protected SchemaRegistryConfig schemaRegistryConfig(
@Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig duheSchemaRegistryConfig) {
return duheSchemaRegistryConfig;
}
}
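Because the bean above is marked `@Primary` and gated on `kafka.schemaRegistry.type=INTERNAL`, a plain injection of `SchemaRegistryConfig` inside the upgrade context resolves to the DUHE variant without a qualifier. A minimal consumer sketch (the class below is illustrative, not part of this commit):

```java
import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig;
import org.springframework.stereotype.Component;

// Illustrative consumer of the @Primary bean defined above.
@Component
public class SchemaRegistryConfigConsumer {

  private final SchemaRegistryConfig schemaRegistryConfig;

  // No @Qualifier needed: when kafka.schemaRegistry.type=INTERNAL the @Primary
  // "schemaRegistryConfig" bean wins over any other SchemaRegistryConfig bean.
  public SchemaRegistryConfigConsumer(SchemaRegistryConfig schemaRegistryConfig) {
    this.schemaRegistryConfig = schemaRegistryConfig;
  }
}
```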

View File

@ -1,13 +1,22 @@
package com.linkedin.datahub.upgrade;
import static com.linkedin.metadata.EventUtils.RENAMED_MCL_AVRO_SCHEMA;
import static com.linkedin.metadata.boot.kafka.MockSystemUpdateSerializer.topicToSubjectName;
import static org.mockito.Mockito.mock;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import com.linkedin.datahub.upgrade.system.SystemUpdate;
import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig;
import com.linkedin.metadata.boot.kafka.MockSystemUpdateDeserializer;
import com.linkedin.metadata.boot.kafka.MockSystemUpdateSerializer;
import com.linkedin.metadata.dao.producer.KafkaEventProducer;
import com.linkedin.metadata.entity.EntityServiceImpl;
import com.linkedin.mxe.Topics;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
import io.datahubproject.metadata.context.OperationContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@ -23,8 +32,8 @@ import org.testng.annotations.Test;
classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class},
properties = {
"kafka.schemaRegistry.type=INTERNAL",
"DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic",
"METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME=test_mcl_versioned_topic"
"DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=" + Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME,
"METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME=" + Topics.METADATA_CHANGE_LOG_VERSIONED,
},
args = {"-u", "SystemUpdate"})
public class DatahubUpgradeNoSchemaRegistryTest extends AbstractTestNGSpringContextTests {
@ -43,15 +52,29 @@ public class DatahubUpgradeNoSchemaRegistryTest extends AbstractTestNGSpringCont
@Autowired private EntityServiceImpl entityService;
@Autowired
@Named("schemaRegistryConfig")
private SchemaRegistryConfig schemaRegistryConfig;
@Test
public void testSystemUpdateInit() {
assertNotNull(systemUpdate);
}
@Test
public void testSystemUpdateKafkaProducerOverride() {
public void testSystemUpdateKafkaProducerOverride() throws RestClientException, IOException {
assertEquals(schemaRegistryConfig.getDeserializer(), MockSystemUpdateDeserializer.class);
assertEquals(schemaRegistryConfig.getSerializer(), MockSystemUpdateSerializer.class);
assertEquals(kafkaEventProducer, duheKafkaEventProducer);
assertEquals(entityService.getProducer(), duheKafkaEventProducer);
MockSystemUpdateSerializer serializer = new MockSystemUpdateSerializer();
serializer.configure(schemaRegistryConfig.getProperties(), false);
SchemaRegistryClient registry = serializer.getSchemaRegistryClient();
assertEquals(
registry.getId(
topicToSubjectName(Topics.METADATA_CHANGE_LOG_VERSIONED), RENAMED_MCL_AVRO_SCHEMA),
2);
}
@Test

View File

@ -4,14 +4,21 @@ import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.testng.Assert.assertEquals;
import static org.testng.AssertJUnit.assertNotNull;
import com.linkedin.datahub.upgrade.impl.DefaultUpgradeManager;
import com.linkedin.datahub.upgrade.system.SystemUpdateNonBlocking;
import com.linkedin.datahub.upgrade.system.vianodes.ReindexDataJobViaNodesCLL;
import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig;
import com.linkedin.metadata.boot.kafka.MockSystemUpdateDeserializer;
import com.linkedin.metadata.boot.kafka.MockSystemUpdateSerializer;
import com.linkedin.metadata.dao.producer.KafkaEventProducer;
import com.linkedin.metadata.entity.AspectDao;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.entity.EntityServiceImpl;
import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs;
import com.linkedin.mxe.Topics;
import io.datahubproject.test.metadata.context.TestOperationContexts;
import java.util.List;
import javax.inject.Named;
@ -27,8 +34,8 @@ import org.testng.annotations.Test;
properties = {
"BOOTSTRAP_SYSTEM_UPDATE_DATA_JOB_NODE_CLL_ENABLED=true",
"kafka.schemaRegistry.type=INTERNAL",
"DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic",
"METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME=test_mcl_versioned_topic"
"DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=" + Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME,
"METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME=" + Topics.METADATA_CHANGE_LOG_VERSIONED,
},
args = {"-u", "SystemUpdateNonBlocking"})
public class DatahubUpgradeNonBlockingTest extends AbstractTestNGSpringContextTests {
@ -38,9 +45,28 @@ public class DatahubUpgradeNonBlockingTest extends AbstractTestNGSpringContextTe
private SystemUpdateNonBlocking systemUpdateNonBlocking;
@Autowired
@Named("schemaRegistryConfig")
private SchemaRegistryConfig schemaRegistryConfig;
@Autowired
@Named("duheKafkaEventProducer")
private KafkaEventProducer duheKafkaEventProducer;
@Autowired
@Named("kafkaEventProducer")
private KafkaEventProducer kafkaEventProducer;
@Autowired private EntityServiceImpl entityService;
@Test
public void testSystemUpdateNonBlockingInit() {
assertNotNull(systemUpdateNonBlocking);
// Expected system update configuration and producer
assertEquals(schemaRegistryConfig.getDeserializer(), MockSystemUpdateDeserializer.class);
assertEquals(schemaRegistryConfig.getSerializer(), MockSystemUpdateSerializer.class);
assertEquals(duheKafkaEventProducer, kafkaEventProducer);
assertEquals(entityService.getProducer(), duheKafkaEventProducer);
}
@Test

View File

@ -1,18 +1,15 @@
package com.linkedin.datahub.upgrade;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.linkedin.gms.factory.auth.SystemAuthenticationFactory;
import com.linkedin.metadata.graph.GraphService;
import com.linkedin.metadata.models.registry.ConfigEntityRegistry;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.registry.SchemaRegistryService;
import com.linkedin.metadata.registry.SchemaRegistryServiceImpl;
import com.linkedin.metadata.search.SearchService;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders;
import com.linkedin.mxe.TopicConventionImpl;
import io.ebean.Database;
import java.util.Optional;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Bean;
@ -38,8 +35,6 @@ public class UpgradeCliApplicationTestConfiguration {
@Bean
public SchemaRegistryService schemaRegistryService() {
SchemaRegistryService mockService = mock(SchemaRegistryService.class);
when(mockService.getSchemaIdForTopic(anyString())).thenReturn(Optional.of(0));
return mockService;
return new SchemaRegistryServiceImpl(new TopicConventionImpl());
}
}

View File

@ -42,6 +42,7 @@ tasks.register('quickstart') {}
tasks.register('quickstartSlim') {}
tasks.register('quickstartDebug') {}
tasks.register('quickstartPg') {}
tasks.register('quickstartStorage') {}
tasks.register('quickstartNuke') {
doFirst {
@ -133,6 +134,19 @@ dockerCompose {
stopContainers = false
removeVolumes = false
}
quickstartStorage {
isRequiredBy(tasks.named('quickstartStorage'))
composeAdditionalArgs = ['--profile', 'quickstart-storage']
useComposeFiles = ['profiles/docker-compose.yml']
projectName = 'datahub'
projectNamePrefix = ''
buildBeforeUp = false
buildBeforePull = false
stopContainers = false
removeVolumes = false
}
}
tasks.getByName('quickstartComposeUp').dependsOn(
quickstart_modules.collect { it + ':dockerTag' })

View File

@ -75,7 +75,7 @@ for configuring multiple ways to authenticate a given request, for example via L
Only if each Authenticator within the chain fails to authenticate a request will it be rejected.
The Authenticator Chain can be configured in the `application.yml` file under `authentication.authenticators`:
The Authenticator Chain can be configured in the `application.yaml` file under `authentication.authenticators`:
```
authentication:

View File

@ -53,7 +53,7 @@ To enable Metadata Service Authentication:
OR
- change the Metadata Service `application.yml` configuration file to set `authentication.enabled` to "true" AND
- change the Metadata Service `application.yaml` configuration file to set `authentication.enabled` to "true" AND
- change the Frontend Proxy Service `application.config` configuration file to set `metadataService.auth.enabled` to "true"
After setting the configuration flag, simply restart the Metadata Service to start enforcing Authentication.
@ -116,7 +116,7 @@ These changes represent the first milestone in Metadata Service Authentication.
That's perfectly fine, for now. Metadata Service Authentication is disabled by default, only enabled if you provide the
environment variable `METADATA_SERVICE_AUTH_ENABLED` to the `datahub-gms` container or change the `authentication.enabled` to "true"
inside your DataHub Metadata Service configuration (`application.yml`).
inside your DataHub Metadata Service configuration (`application.yaml`).
That being said, we will be recommending that you enable Authentication for production use cases, to prevent
arbitrary actors from ingesting metadata into DataHub.
@ -141,7 +141,7 @@ the root "datahub" user account.
### I want to authenticate requests using a custom Authenticator? How do I do this?
You can configure DataHub to add your custom **Authenticator** to the **Authentication Chain** by changing the `application.yml` configuration file for the Metadata Service:
You can configure DataHub to add your custom **Authenticator** to the **Authentication Chain** by changing the `application.yaml` configuration file for the Metadata Service:
```yml
authentication:

View File

@ -332,7 +332,7 @@ and [here](../../metadata-service/factories/src/main/java/com/linkedin/gms/facto
.
A mapping between the property name used in the above two files and the name used in docker/env file can be
found [here](../../metadata-service/configuration/src/main/resources/application.yml).
found [here](../../metadata-service/configuration/src/main/resources/application.yaml).
### Managed Streaming for Apache Kafka (MSK)

View File

@ -328,7 +328,7 @@ Helm with `--atomic`: In general, it is recommended to not use the `--atomic` se
- #6243 apache-ranger authorizer is no longer the core part of DataHub GMS, and it is shifted as plugin. Please refer updated documentation [Configuring Authorization with Apache Ranger](./configuring-authorization-with-apache-ranger.md#configuring-your-datahub-deployment) for configuring `apache-ranger-plugin` in DataHub GMS.
- #6243 apache-ranger authorizer as plugin is not supported in DataHub Kubernetes deployment.
- #6243 Authentication and Authorization plugins configuration are removed from [application.yml](../../metadata-service/configuration/src/main/resources/application.yml). Refer documentation [Migration Of Plugins From application.yml](../plugins.md#migration-of-plugins-from-applicationyml) for migrating any existing custom plugins.
- #6243 Authentication and Authorization plugin configuration has been removed from [application.yaml](../../metadata-service/configuration/src/main/resources/application.yaml). Refer to the documentation [Migration Of Plugins From application.yaml](../plugins.md#migration-of-plugins-from-applicationyml) for migrating any existing custom plugins.
- `datahub check graph-consistency` command has been removed. It was a beta API that we had considered but decided there are better solutions for this. So removing this.
- `graphql_url` option of `powerbi-report-server` source deprecated as the options is not used.
- #6789 BigQuery ingestion: If `enable_legacy_sharded_table_support` is set to False, sharded table names will be suffixed with \_yyyymmdd to make sure they don't clash with non-sharded tables. This means if stateful ingestion is enabled then old sharded tables will be recreated with a new id and attached tags/glossary terms/etc will need to be added again. _This behavior is not enabled by default yet, but will be enabled by default in a future release._

View File

@ -252,11 +252,11 @@ All other access are forbidden for the plugin.
> Disclaimer: In BETA version your plugin can access any port and can read/write to any location on file system, however you should implement the plugin as per above access permission to keep your plugin compatible with upcoming release of DataHub.
## Migration Of Plugins From application.yml
If you have any custom Authentication or Authorization plugin define in `authorization` or `authentication` section of [application.yml](../metadata-service/configuration/src/main/resources/application.yml) then migrate them as per below steps.
## Migration Of Plugins From application.yaml
If you have any custom Authentication or Authorization plugins defined in the `authorization` or `authentication` section of [application.yaml](../metadata-service/configuration/src/main/resources/application.yaml), migrate them using the steps below.
1. Implement Plugin: For Authentication Plugin follow steps of [Implementing an Authentication Plugin](#implementing-an-authentication-plugin) and for Authorization Plugin follow steps of [Implementing an Authorization Plugin](#implementing-an-authorization-plugin)
2. Install Plugin: Install the plugins as per steps mentioned in [Plugin Installation](#plugin-installation). Here you need to map the configuration from [application.yml](../metadata-service/configuration/src/main/resources/application.yml) to configuration in `config.yml`. This mapping from `application.yml` to `config.yml` is described below
2. Install Plugin: Install the plugins following the steps in [Plugin Installation](#plugin-installation). Here you need to map the configuration from [application.yaml](../metadata-service/configuration/src/main/resources/application.yaml) to the configuration in `config.yml`. This mapping from `application.yaml` to `config.yml` is described below
**Mapping for Authenticators**

View File

@ -57,7 +57,7 @@ public class EventUtils {
private static final Schema ORIGINAL_MCP_AVRO_SCHEMA =
getAvroSchemaFromResource("avro/com/linkedin/mxe/MetadataChangeProposal.avsc");
public static final Schema ORIGINAL_MCL_AVRO_SCHEMA =
private static final Schema ORIGINAL_MCL_AVRO_SCHEMA =
getAvroSchemaFromResource("avro/com/linkedin/mxe/MetadataChangeLog.avsc");
private static final Schema ORIGINAL_FMCL_AVRO_SCHEMA =
@ -84,7 +84,7 @@ public class EventUtils {
private static final Schema RENAMED_MCP_AVRO_SCHEMA =
com.linkedin.pegasus2avro.mxe.MetadataChangeProposal.SCHEMA$;
private static final Schema RENAMED_MCL_AVRO_SCHEMA =
public static final Schema RENAMED_MCL_AVRO_SCHEMA =
com.linkedin.pegasus2avro.mxe.MetadataChangeLog.SCHEMA$;
private static final Schema RENAMED_FMCP_AVRO_SCHEMA =

View File

@ -14,7 +14,10 @@ import org.springframework.context.annotation.FilterType;
exclude = {ElasticsearchRestClientAutoConfiguration.class, CassandraAutoConfiguration.class})
@ComponentScan(
basePackages = {
"com.linkedin.gms.factory.common",
"com.linkedin.gms.factory.kafka",
"com.linkedin.gms.factory.kafka.common",
"com.linkedin.gms.factory.kafka.schemaregistry",
"com.linkedin.metadata.boot.kafka",
"com.linkedin.metadata.kafka",
"com.linkedin.metadata.dao.producer",

View File

@ -1,10 +1,6 @@
package com.linkedin.metadata.kafka;
import static com.linkedin.metadata.Constants.*;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.linkedin.gms.factory.common.GitVersionFactory;
import com.linkedin.metadata.version.GitVersion;
@ -21,27 +17,15 @@ public class MclConsumerConfig {
private final Map<String, Object> config;
private final String configJson;
private static final ObjectMapper OBJECT_MAPPER =
new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL);
static {
int maxSize =
Integer.parseInt(
System.getenv()
.getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
OBJECT_MAPPER
.getFactory()
.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
}
public MclConsumerConfig(GitVersion gitVersion) throws JsonProcessingException {
public MclConsumerConfig(final GitVersion gitVersion, final ObjectMapper objectMapper)
throws JsonProcessingException {
config = new HashMap<>();
config.put("noCode", "true");
Map<String, Object> versionConfig = new HashMap<>();
versionConfig.put("acryldata/datahub", gitVersion.toConfig());
config.put("versions", versionConfig);
configJson = OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(config);
configJson = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(config);
}
@GetMapping("/config")

View File

@ -1,5 +1,5 @@
plugins {
id 'java'
id 'java-library'
}
apply plugin: 'pegasus'
@ -32,7 +32,7 @@ dependencies {
implementation externalDependency.neo4jJavaDriver
implementation externalDependency.springKafka
implementation externalDependency.springActuator
implementation externalDependency.annotationApi
implementation externalDependency.slf4jApi
compileOnly externalDependency.lombok

View File

@ -11,7 +11,7 @@ import com.linkedin.gms.factory.timeseries.TimeseriesAspectServiceFactory;
import com.linkedin.metadata.service.UpdateIndicesService;
import com.linkedin.mxe.MetadataChangeLog;
import io.datahubproject.metadata.context.OperationContext;
import jakarta.annotation.Nonnull;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Import;

View File

@ -24,7 +24,7 @@ import org.testng.annotations.Test;
"kafka.schemaRegistry.type=INTERNAL"
})
@TestPropertySource(
locations = "classpath:/application.yml",
locations = "classpath:/application.yaml",
properties = {"MCL_CONSUMER_ENABLED=true"})
@EnableAutoConfiguration(exclude = {CassandraAutoConfiguration.class})
public class MCLSpringTest extends AbstractTestNGSpringContextTests {

View File

@ -6,12 +6,10 @@ import static org.mockito.Mockito.when;
import com.datahub.authentication.Authentication;
import com.datahub.metadata.ingestion.IngestionScheduler;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig;
import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.registry.SchemaRegistryService;
import com.linkedin.metadata.search.elasticsearch.ElasticSearchService;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders;
import com.linkedin.metadata.search.transformer.SearchDocumentTransformer;
@ -36,6 +34,8 @@ import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
@ComponentScan(
basePackages = {
"com.linkedin.metadata.kafka",
"com.linkedin.gms.factory.kafka.common",
"com.linkedin.gms.factory.kafka.schemaregistry",
"com.linkedin.gms.factory.entity.update.indices",
"com.linkedin.gms.factory.timeline.eventgenerator"
})
@ -68,14 +68,9 @@ public class MCLSpringTestConfiguration {
@MockBean(name = "dataHubUpgradeKafkaListener")
public DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener;
@MockBean(name = "duheSchemaRegistryConfig")
public SchemaRegistryConfig schemaRegistryConfig;
@MockBean(name = "duheKafkaConsumerFactory")
public DefaultKafkaConsumerFactory<String, GenericRecord> defaultKafkaConsumerFactory;
@MockBean public SchemaRegistryService schemaRegistryService;
@MockBean public EntityIndexBuilders entityIndexBuilders;
@Bean(name = "systemOperationContext")

View File

@ -40,7 +40,7 @@ import org.springframework.context.annotation.PropertySource;
type = FilterType.ASSIGNABLE_TYPE,
classes = {ScheduledAnalyticsFactory.class})
})
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@PropertySource(value = "classpath:/application.yaml", factory = YamlPropertySourceFactory.class)
public class MceConsumerApplication {
public static void main(String[] args) {

View File

@ -1,6 +1,6 @@
package com.linkedin.metadata.restli;
import static com.linkedin.gms.factory.common.LocalEbeanServerConfigFactory.getListenerToTrackCounts;
import static com.linkedin.gms.factory.common.LocalEbeanConfigFactory.getListenerToTrackCounts;
import io.ebean.datasource.DataSourceConfig;
import java.util.HashMap;

View File

@ -1,5 +1,5 @@
plugins {
id 'java'
id 'java-library'
id 'pegasus'
}
@ -28,7 +28,6 @@ dependencies {
implementation externalDependency.protobuf
implementation externalDependency.springKafka
implementation externalDependency.springActuator
implementation externalDependency.slf4jApi
compileOnly externalDependency.lombok

View File

@ -6,8 +6,6 @@ import com.codahale.metrics.Timer;
import com.linkedin.entity.Entity;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.gms.factory.entityclient.RestliEntityClientFactory;
import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory;
import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory;
import com.linkedin.metadata.EventUtils;
import com.linkedin.metadata.kafka.config.MetadataChangeProposalProcessorCondition;
import com.linkedin.metadata.snapshot.Snapshot;
@ -38,11 +36,7 @@ import org.springframework.stereotype.Component;
@Slf4j
@Component
@Conditional(MetadataChangeProposalProcessorCondition.class)
@Import({
RestliEntityClientFactory.class,
KafkaEventConsumerFactory.class,
DataHubKafkaProducerFactory.class
})
@Import({RestliEntityClientFactory.class})
@EnableKafka
@RequiredArgsConstructor
public class MetadataChangeEventsProcessor {

View File

@ -5,8 +5,6 @@ import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.gms.factory.entityclient.RestliEntityClientFactory;
import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory;
import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory;
import com.linkedin.metadata.EventUtils;
import com.linkedin.metadata.kafka.config.MetadataChangeProposalProcessorCondition;
import com.linkedin.metadata.utils.metrics.MetricUtils;
@ -33,11 +31,7 @@ import org.springframework.stereotype.Component;
@Slf4j
@Component
@Import({
RestliEntityClientFactory.class,
KafkaEventConsumerFactory.class,
DataHubKafkaProducerFactory.class
})
@Import({RestliEntityClientFactory.class})
@Conditional(MetadataChangeProposalProcessorCondition.class)
@EnableKafka
@RequiredArgsConstructor

View File

@ -1,5 +1,5 @@
plugins {
id 'java'
id 'java-library'
id 'pegasus'
}
@ -17,7 +17,6 @@ dependencies {
exclude group: 'org.neo4j.test'
}
implementation externalDependency.springKafka
implementation externalDependency.springActuator
implementation externalDependency.slf4jApi
compileOnly externalDependency.lombok
annotationProcessor externalDependency.lombok

View File

@ -3,7 +3,7 @@ package com.datahub.authentication;
import java.util.List;
import lombok.Data;
/** POJO representing the "authentication" configuration block in application.yml. */
/** POJO representing the "authentication" configuration block in application.yaml. */
@Data
public class AuthenticationConfiguration {
/** Whether authentication is enabled */

View File

@ -5,7 +5,7 @@ import lombok.Data;
/**
* POJO representing {@link com.datahub.plugins.auth.authentication.Authenticator} configurations
* provided in the application.yml.
* provided in the application.yaml.
*/
@Data
public class AuthenticatorConfiguration {

View File

@ -5,7 +5,7 @@ import com.datahub.plugins.auth.authorization.Authorizer;
import java.util.List;
import lombok.Data;
/** POJO representing the "authentication" configuration block in application.yml. */
/** POJO representing the "authentication" configuration block in application.yaml. */
@Data
public class AuthorizationConfiguration {
/** Configuration for the default DataHub Policies-based authorizer. */

View File

@ -4,7 +4,7 @@ import com.datahub.plugins.auth.authorization.Authorizer;
import java.util.Map;
import lombok.Data;
/** POJO representing {@link Authorizer} configurations provided in the application.yml. */
/** POJO representing {@link Authorizer} configurations provided in the application.yaml. */
@Data
public class AuthorizerConfiguration {
/** Whether to enable this authorizer */

View File

@ -28,7 +28,6 @@ import com.datahub.plugins.loader.PluginPermissionManagerImpl;
import com.google.common.collect.ImmutableMap;
import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.metadata.entity.EntityService;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import jakarta.servlet.Filter;
import jakarta.servlet.FilterChain;
@ -48,6 +47,7 @@ import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.context.support.SpringBeanAutowiringSupport;
@ -58,13 +58,13 @@ import org.springframework.web.context.support.SpringBeanAutowiringSupport;
@Slf4j
public class AuthenticationFilter implements Filter {
@Inject private ConfigurationProvider configurationProvider;
@Autowired private ConfigurationProvider configurationProvider;
@Inject
@Autowired
@Named("entityService")
private EntityService _entityService;
private EntityService<?> _entityService;
@Inject
@Autowired
@Named("dataHubTokenService")
private StatefulTokenService _tokenService;
@ -252,7 +252,7 @@ public class AuthenticationFilter implements Filter {
authenticatorChain.register(
systemAuthenticator); // Always register authenticator for internal system.
// Register authenticator define in application.yml
// Register authenticator define in application.yaml
final List<AuthenticatorConfiguration> authenticatorConfigurations =
this.configurationProvider.getAuthentication().getAuthenticators();
for (AuthenticatorConfiguration internalAuthenticatorConfig : authenticatorConfigurations) {

View File

@ -23,13 +23,12 @@ import com.linkedin.settings.global.OidcSettings;
import com.linkedin.settings.global.SsoSettings;
import io.datahubproject.metadata.context.OperationContext;
import io.datahubproject.metadata.services.SecretService;
import jakarta.inject.Inject;
import jakarta.inject.Named;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import javax.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
@ -78,27 +77,29 @@ public class AuthServiceController {
private static final String PREFERRED_JWS_ALGORITHM = "preferredJwsAlgorithm";
private static final String PREFERRED_JWS_ALGORITHM_2 = "preferredJwsAlgorithm2";
@Inject StatelessTokenService _statelessTokenService;
@Autowired private StatelessTokenService _statelessTokenService;
@Inject Authentication _systemAuthentication;
@Autowired private Authentication _systemAuthentication;
@Inject
@Autowired
@Qualifier("configurationProvider")
ConfigurationProvider _configProvider;
private ConfigurationProvider _configProvider;
@Inject NativeUserService _nativeUserService;
@Autowired private NativeUserService _nativeUserService;
@Inject EntityService _entityService;
@Autowired private EntityService<?> _entityService;
@Inject SecretService _secretService;
@Autowired private SecretService _secretService;
@Inject InviteTokenService _inviteTokenService;
@Autowired private InviteTokenService _inviteTokenService;
@Inject @Nullable TrackingService _trackingService;
@Autowired @Nullable private TrackingService _trackingService;
@Inject
@Named("systemOperationContext")
OperationContext systemOperationContext;
@Autowired private ObjectMapper mapper;
@Autowired
@Qualifier("systemOperationContext")
private OperationContext systemOperationContext;
/**
* Generates a JWT access token for as user UI session, provided a unique "user id" to generate
@ -117,7 +118,7 @@ public class AuthServiceController {
CompletableFuture<ResponseEntity<String>> generateSessionTokenForUser(
final HttpEntity<String> httpEntity) {
String jsonStr = httpEntity.getBody();
ObjectMapper mapper = new ObjectMapper();
JsonNode bodyJson = null;
try {
bodyJson = mapper.readTree(jsonStr);
@ -183,7 +184,7 @@ public class AuthServiceController {
@PostMapping(value = "/signUp", produces = "application/json;charset=utf-8")
CompletableFuture<ResponseEntity<String>> signUp(final HttpEntity<String> httpEntity) {
String jsonStr = httpEntity.getBody();
ObjectMapper mapper = new ObjectMapper();
JsonNode bodyJson;
try {
bodyJson = mapper.readTree(jsonStr);
@ -273,7 +274,7 @@ public class AuthServiceController {
CompletableFuture<ResponseEntity<String>> resetNativeUserCredentials(
final HttpEntity<String> httpEntity) {
String jsonStr = httpEntity.getBody();
ObjectMapper mapper = new ObjectMapper();
JsonNode bodyJson;
try {
bodyJson = mapper.readTree(jsonStr);
@ -332,7 +333,7 @@ public class AuthServiceController {
CompletableFuture<ResponseEntity<String>> verifyNativeUserCredentials(
final HttpEntity<String> httpEntity) {
String jsonStr = httpEntity.getBody();
ObjectMapper mapper = new ObjectMapper();
JsonNode bodyJson;
try {
bodyJson = mapper.readTree(jsonStr);
@ -377,7 +378,7 @@ public class AuthServiceController {
@PostMapping(value = "/track", produces = "application/json;charset=utf-8")
CompletableFuture<ResponseEntity<String>> track(final HttpEntity<String> httpEntity) {
String jsonStr = httpEntity.getBody();
ObjectMapper mapper = new ObjectMapper();
JsonNode bodyJson;
try {
bodyJson = mapper.readTree(jsonStr);

View File

@ -5,12 +5,14 @@ import com.datahub.authentication.invite.InviteTokenService;
import com.datahub.authentication.token.StatelessTokenService;
import com.datahub.authentication.user.NativeUserService;
import com.datahub.telemetry.TrackingService;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.metadata.entity.EntityService;
import io.datahubproject.metadata.context.OperationContext;
import io.datahubproject.metadata.services.SecretService;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Bean;
@TestConfiguration
public class AuthServiceTestConfiguration {
@ -33,4 +35,9 @@ public class AuthServiceTestConfiguration {
@MockBean InviteTokenService _inviteTokenService;
@MockBean TrackingService _trackingService;
@Bean
public ObjectMapper objectMapper() {
return new ObjectMapper();
}
}

View File

@ -2,7 +2,7 @@ package com.linkedin.metadata.config;
import lombok.Data;
/** POJO representing the "datahub" configuration block in application.yml. */
/** POJO representing the "datahub" configuration block in application.yaml. */
@Data
public class DataHubConfiguration {
/**

View File

@ -2,7 +2,7 @@ package com.linkedin.metadata.config;
import lombok.Data;
/** POJO representing the "ingestion" configuration block in application.yml. */
/** POJO representing the "ingestion" configuration block in application.yaml. */
@Data
public class IngestionConfiguration {
/** Whether managed ingestion is enabled */

View File

@ -2,7 +2,7 @@ package com.linkedin.metadata.config;
import lombok.Data;
/** POJO representing the "tests" configuration block in application.yml.on.yml */
/** POJO representing the "tests" configuration block in application.yaml */
@Data
public class TestsConfiguration {
/** Whether tests are enabled */

View File

@ -2,7 +2,7 @@ package com.linkedin.metadata.config;
import lombok.Data;
/** POJO representing the "views" configuration block in application.yml.on.yml */
/** POJO representing the "views" configuration block in application.yaml */
@Data
public class ViewsConfiguration {
/** Whether Views are enabled */

View File

@ -2,7 +2,7 @@ package com.linkedin.metadata.config;
import lombok.Data;
/** POJO representing visualConfig block in the application.yml. */
/** POJO representing visualConfig block in the application.yaml. */
@Data
public class VisualConfiguration {
/** Asset related configurations */

View File

@ -17,7 +17,7 @@ public class CustomConfiguration {
private String file;
/**
* Materialize the search configuration from a location external to main application.yml
* Materialize the search configuration from a location external to main application.yaml
*
* @param mapper yaml enabled jackson mapper
* @return search configuration class

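The Javadoc above describes loading a search configuration from a file outside the packaged `application.yaml` using a YAML-enabled Jackson mapper. A hedged sketch of that idea with placeholder names (the actual method signature and return type in `CustomConfiguration` are not shown in this hunk):

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

final class ExternalYamlConfigLoader {

  // Placeholder helper: reads an external YAML file into the given configuration class
  // using a YAML-enabled ObjectMapper, as the Javadoc above describes.
  static <T> T materialize(ObjectMapper yamlMapper, String file, Class<T> configClass)
      throws IOException {
    try (InputStream in = Files.newInputStream(Path.of(file))) {
      return yamlMapper.readValue(in, configClass);
    }
  }
}
```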
View File

@ -2,7 +2,7 @@ package com.linkedin.metadata.config.telemetry;
import lombok.Data;
/** POJO representing the "telemetry" configuration block in application.yml. */
/** POJO representing the "telemetry" configuration block in application.yaml. */
@Data
public class TelemetryConfiguration {
/** Whether cli telemetry is enabled */

View File

@ -8,7 +8,7 @@ import org.springframework.core.env.PropertySource;
import org.springframework.core.io.support.EncodedResource;
import org.springframework.core.io.support.PropertySourceFactory;
/** Required for Spring to parse the application.yml provided by this module */
/** Required for Spring to parse the application.yaml provided by this module */
public class YamlPropertySourceFactory implements PropertySourceFactory {
@Override

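Only the Javadoc changes here, but for readers unfamiliar with the pattern: a `PropertySourceFactory` of this kind is conventionally a thin wrapper around Spring's `YamlPropertiesFactoryBean`. A minimal sketch of the conventional shape (the module's actual implementation may differ in details):

```java
import java.io.IOException;
import java.util.Properties;
import org.springframework.beans.factory.config.YamlPropertiesFactoryBean;
import org.springframework.core.env.PropertiesPropertySource;
import org.springframework.core.env.PropertySource;
import org.springframework.core.io.support.EncodedResource;
import org.springframework.core.io.support.PropertySourceFactory;

public class YamlPropertySourceFactorySketch implements PropertySourceFactory {

  @Override
  public PropertySource<?> createPropertySource(String name, EncodedResource resource)
      throws IOException {
    // Parse the YAML resource into flat java.util.Properties.
    YamlPropertiesFactoryBean factory = new YamlPropertiesFactoryBean();
    factory.setResources(resource.getResource());
    Properties properties = factory.getObject();

    // Expose it under the resource's filename so @PropertySource can resolve keys from it.
    return new PropertiesPropertySource(resource.getResource().getFilename(), properties);
  }
}
```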
View File

@ -14,6 +14,7 @@ dependencies {
implementation project(':metadata-service:restli-servlet-impl')
implementation project(':metadata-dao-impl:kafka-producer')
implementation project(':ingestion-scheduler')
implementation project(':metadata-service:schema-registry-api')
implementation (externalDependency.awsGlueSchemaRegistrySerde) {
exclude group: 'org.json', module: 'json'

View File

@ -19,7 +19,6 @@ import com.datahub.plugins.loader.PluginPermissionManagerImpl;
import com.google.common.collect.ImmutableMap;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import io.datahubproject.metadata.context.OperationContext;
import jakarta.annotation.Nonnull;
import java.nio.file.Path;
@ -35,12 +34,10 @@ import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Slf4j
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@Import({DataHubAuthorizerFactory.class})
public class AuthorizerChainFactory {
@Autowired

View File

@ -2,18 +2,15 @@ package com.linkedin.gms.factory.auth;
import com.datahub.authorization.DataHubAuthorizer;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import io.datahubproject.metadata.context.OperationContext;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class DataHubAuthorizerFactory {
@Value("${authorization.defaultAuthorizer.cacheRefreshIntervalSecs}")

View File

@ -2,7 +2,6 @@ package com.linkedin.gms.factory.auth;
import com.datahub.authentication.token.StatefulTokenService;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import io.datahubproject.metadata.context.OperationContext;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Autowired;
@ -10,11 +9,9 @@ import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class DataHubTokenServiceFactory {
@Value("${authentication.tokenService.signingKey:}")

View File

@ -4,17 +4,14 @@ import com.datahub.authentication.group.GroupService;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.graph.GraphClient;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class GroupServiceFactory {
@Autowired
@Qualifier("entityService")

View File

@ -2,18 +2,15 @@ package com.linkedin.gms.factory.auth;
import com.datahub.authentication.invite.InviteTokenService;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import io.datahubproject.metadata.services.SecretService;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class InviteTokenServiceFactory {
@Autowired

View File

@ -4,18 +4,15 @@ import com.datahub.authentication.user.NativeUserService;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import io.datahubproject.metadata.services.SecretService;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class NativeUserServiceFactory {
@Autowired
@Qualifier("entityService")

View File

@ -2,16 +2,13 @@ package com.linkedin.gms.factory.auth;
import com.datahub.authentication.post.PostService;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class PostServiceFactory {
@Bean(name = "postService")

View File

@ -2,16 +2,13 @@ package com.linkedin.gms.factory.auth;
import com.datahub.authorization.role.RoleService;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class RoleServiceFactory {
@Bean(name = "roleService")

View File

@ -3,14 +3,12 @@ package com.linkedin.gms.factory.auth;
import com.datahub.authentication.Actor;
import com.datahub.authentication.ActorType;
import com.datahub.authentication.Authentication;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import lombok.Data;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
/**
@ -19,7 +17,6 @@ import org.springframework.context.annotation.Scope;
*/
@Configuration
@ConfigurationProperties
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@Data
public class SystemAuthenticationFactory {

View File

@ -8,17 +8,14 @@ import com.linkedin.metadata.graph.elastic.ESGraphWriteDAO;
import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.models.registry.LineageRegistry;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@Import({BaseElasticSearchComponentsFactory.class, EntityRegistryFactory.class})
public class ElasticSearchGraphServiceFactory {
@Autowired

View File

@ -1,7 +1,6 @@
package com.linkedin.gms.factory.common;
import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import com.linkedin.metadata.systemmetadata.ESSystemMetadataDAO;
import com.linkedin.metadata.systemmetadata.ElasticSearchSystemMetadataService;
import javax.annotation.Nonnull;
@ -10,10 +9,8 @@ import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@Import({BaseElasticSearchComponentsFactory.class})
public class ElasticSearchSystemMetadataServiceFactory {
@Autowired

View File

@ -1,6 +1,5 @@
package com.linkedin.gms.factory.common;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
@ -17,10 +16,8 @@ import org.apache.http.ssl.SSLContextBuilder;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class ElasticsearchSSLContextFactory {
@Value("${elasticsearch.sslContext.protocol}")

View File

@ -3,7 +3,6 @@ package com.linkedin.gms.factory.common;
import com.linkedin.metadata.graph.GraphService;
import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService;
import com.linkedin.metadata.graph.neo4j.Neo4jGraphService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@ -13,10 +12,8 @@ import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.PropertySource;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@Import({Neo4jGraphServiceFactory.class, ElasticSearchGraphServiceFactory.class})
public class GraphServiceFactory {
@Autowired

View File

@ -1,12 +1,10 @@
package com.linkedin.gms.factory.common;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
/**
* Creates a {@link IndexConvention} to generate search index names.
@ -14,7 +12,6 @@ import org.springframework.context.annotation.PropertySource;
* <p>This allows you to easily add prefixes to the index names.
*/
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class IndexConventionFactory {
public static final String INDEX_CONVENTION_BEAN = "searchIndexConvention";

View File

@ -1,8 +1,7 @@
package com.linkedin.gms.factory.common;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import com.linkedin.metadata.utils.metrics.MetricUtils;
import io.ebean.config.ServerConfig;
import io.ebean.config.DatabaseConfig;
import io.ebean.datasource.DataSourceConfig;
import io.ebean.datasource.DataSourcePoolListener;
import java.sql.Connection;
@ -13,12 +12,10 @@ import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
@Slf4j
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class LocalEbeanServerConfigFactory {
public class LocalEbeanConfigFactory {
@Value("${ebean.username}")
private String ebeanDatasourceUsername;
@ -91,11 +88,11 @@ public class LocalEbeanServerConfigFactory {
return dataSourceConfig;
}
@Bean(name = "gmsEbeanServiceConfig")
protected ServerConfig createInstance(
@Bean(name = "gmsEbeanDatabaseConfig")
protected DatabaseConfig createInstance(
@Qualifier("ebeanDataSourceConfig") DataSourceConfig config) {
ServerConfig serverConfig = new ServerConfig();
serverConfig.setName("gmsEbeanServiceConfig");
DatabaseConfig serverConfig = new DatabaseConfig();
serverConfig.setName("gmsEbeanDatabaseConfig");
serverConfig.setDataSourceConfig(config);
serverConfig.setDdlGenerate(ebeanAutoCreate);
serverConfig.setDdlRun(ebeanAutoCreate);

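With the Ebean API migration above, the deprecated `ServerConfig` becomes `DatabaseConfig` and the bean is renamed from `gmsEbeanServiceConfig` to `gmsEbeanDatabaseConfig`. For context, a minimal sketch of how such a config is typically turned into an `io.ebean.Database` using the standard Ebean API (the repo's own factory, shown truncated later in this diff, may wire it differently):

```java
import io.ebean.Database;
import io.ebean.DatabaseFactory;
import io.ebean.config.DatabaseConfig;

final class EbeanDatabaseSketch {

  // Standard Ebean API: DatabaseFactory.create builds a Database from a DatabaseConfig,
  // replacing the older EbeanServerFactory/ServerConfig pair.
  static Database create(DatabaseConfig config) {
    return DatabaseFactory.create(config);
  }
}
```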
View File

@ -1,6 +1,5 @@
package com.linkedin.gms.factory.common;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import java.util.concurrent.TimeUnit;
import org.neo4j.driver.AuthTokens;
import org.neo4j.driver.Config;
@ -9,10 +8,8 @@ import org.neo4j.driver.GraphDatabase;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class Neo4jDriverFactory {
@Value("${neo4j.username}")
private String username;

View File

@ -0,0 +1,25 @@
package com.linkedin.gms.factory.common;
import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH;
import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class ObjectMapperFactory {
@Bean
public ObjectMapper objectMapper() {
ObjectMapper objectMapper = new ObjectMapper();
int maxSize =
Integer.parseInt(
System.getenv()
.getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
objectMapper
.getFactory()
.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
return objectMapper;
}
}
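This new factory centralizes the `StreamReadConstraints` setup that was previously duplicated in static initializers (see the `MclConsumerConfig` hunk earlier in this commit). A minimal usage sketch, assuming constructor injection as in the refactored consumers (the class below is illustrative):

```java
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.stereotype.Component;

// Illustrative consumer of the shared ObjectMapper bean.
@Component
public class ExampleJsonConsumer {

  private final ObjectMapper objectMapper;

  public ExampleJsonConsumer(final ObjectMapper objectMapper) {
    // The injected mapper already carries the maxStringLength constraint configured above.
    this.objectMapper = objectMapper;
  }

  public JsonNode parse(final String json) throws JsonProcessingException {
    return objectMapper.readTree(json);
  }
}
```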

View File

@ -1,7 +1,6 @@
package com.linkedin.gms.factory.common;
import com.linkedin.gms.factory.auth.AwsRequestSigningApacheInterceptor;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import java.io.IOException;
import javax.annotation.Nonnull;
import javax.net.ssl.HostnameVerifier;
@ -38,13 +37,11 @@ import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;
import software.amazon.awssdk.auth.signer.Aws4Signer;
@Slf4j
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@Import({ElasticsearchSSLContextFactory.class})
public class RestHighLevelClientFactory {

View File

@ -15,15 +15,16 @@ import com.linkedin.metadata.config.cache.CacheConfiguration;
import com.linkedin.metadata.config.kafka.KafkaConfiguration;
import com.linkedin.metadata.config.search.ElasticSearchConfiguration;
import com.linkedin.metadata.config.telemetry.TelemetryConfiguration;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import lombok.Data;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.stereotype.Component;
@Configuration
@Component
// Include extra kafka properties
@EnableConfigurationProperties(KafkaProperties.class)
@ConfigurationProperties
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@Data
public class ConfigurationProvider {
/** Authentication related configs */
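
With ConfigurationProvider registered as a plain @Component bound via @ConfigurationProperties (and KafkaProperties enabled through @EnableConfigurationProperties), other factories can take the typed configuration as a bean-method parameter instead of re-declaring a @PropertySource. A sketch under an assumed factory name; getKafka() is the same accessor used by DataHubKafkaProducerFactory further down.

import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.metadata.config.kafka.KafkaConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class ExampleKafkaConfigFactory { // hypothetical factory, for illustration only
  @Bean
  public KafkaConfiguration kafkaConfiguration(ConfigurationProvider configurationProvider) {
    // The provider is bound from application.yaml once; consumers just inject it.
    return configurationProvider.getKafka();
  }
}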

View File

@@ -1,6 +1,5 @@
package com.linkedin.gms.factory.context.services;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import io.datahubproject.metadata.services.RestrictedService;
import io.datahubproject.metadata.services.SecretService;
import javax.annotation.Nonnull;
@@ -8,11 +7,9 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class RestrictedServiceFactory {
@Autowired

View File

@@ -3,17 +3,14 @@ package com.linkedin.gms.factory.dataproduct;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.graph.GraphClient;
import com.linkedin.metadata.service.DataProductService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class DataProductServiceFactory {
@Autowired

View File

@@ -2,29 +2,24 @@ package com.linkedin.gms.factory.entity;
import com.linkedin.metadata.entity.ebean.EbeanAspectV2;
import io.ebean.Database;
import io.ebean.config.ServerConfig;
import io.ebean.config.DatabaseConfig;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
@Configuration
@Slf4j
public class EbeanServerFactory {
public class EbeanDatabaseFactory {
public static final String EBEAN_MODEL_PACKAGE = EbeanAspectV2.class.getPackage().getName();
@Autowired ApplicationContext applicationContext;
@Bean(name = "ebeanServer")
@DependsOn({"gmsEbeanServiceConfig"})
@Bean("ebeanServer")
@ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true)
@Nonnull
protected Database createServer() {
ServerConfig serverConfig = applicationContext.getBean(ServerConfig.class);
protected Database createServer(
@Qualifier("gmsEbeanDatabaseConfig") DatabaseConfig serverConfig) {
// Make sure that the serverConfig includes the package that contains DAO's Ebean model.
if (!serverConfig.getPackages().contains(EBEAN_MODEL_PACKAGE)) {
serverConfig.getPackages().add(EBEAN_MODEL_PACKAGE);
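
The hunk is cut off before the Database is actually built; the remainder of createServer presumably hands the qualified DatabaseConfig to Ebean, roughly as in this sketch. The helper class is hypothetical and assumes io.ebean.DatabaseFactory, which is not shown in the diff.

import io.ebean.Database;
import io.ebean.DatabaseFactory;
import io.ebean.config.DatabaseConfig;

final class EbeanDatabaseSketch { // hypothetical helper, for illustration only
  static Database create(DatabaseConfig serverConfig, String modelPackage) {
    // Register the DAO model package before building the Database, as above.
    if (!serverConfig.getPackages().contains(modelPackage)) {
      serverConfig.getPackages().add(modelPackage);
    }
    return DatabaseFactory.create(serverConfig);
  }
}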

View File

@@ -7,6 +7,7 @@ import com.linkedin.metadata.entity.cassandra.CassandraAspectDao;
import com.linkedin.metadata.entity.ebean.EbeanAspectDao;
import io.ebean.Database;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -16,11 +17,11 @@ import org.springframework.context.annotation.DependsOn;
public class EntityAspectDaoFactory {
@Bean(name = "entityAspectDao")
@DependsOn({"gmsEbeanServiceConfig"})
@ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true)
@Nonnull
protected AspectDao createEbeanInstance(
Database server, final ConfigurationProvider configurationProvider) {
@Qualifier("ebeanServer") final Database server,
final ConfigurationProvider configurationProvider) {
return new EbeanAspectDao(server, configurationProvider.getEbean());
}

View File

@@ -7,6 +7,7 @@ import com.linkedin.metadata.entity.cassandra.CassandraAspectDao;
import com.linkedin.metadata.entity.ebean.EbeanAspectDao;
import io.ebean.Database;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -16,11 +17,11 @@ import org.springframework.context.annotation.DependsOn;
public class EntityAspectMigrationsDaoFactory {
@Bean(name = "entityAspectMigrationsDao")
@DependsOn({"gmsEbeanServiceConfig"})
@ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true)
@Nonnull
protected AspectMigrationsDao createEbeanInstance(
Database server, final ConfigurationProvider configurationProvider) {
@Qualifier("ebeanServer") final Database server,
final ConfigurationProvider configurationProvider) {
return new EbeanAspectDao(server, configurationProvider.getEbean());
}

View File

@@ -6,7 +6,6 @@ import com.linkedin.metadata.entity.RetentionService;
import com.linkedin.metadata.entity.cassandra.CassandraRetentionService;
import com.linkedin.metadata.entity.ebean.EbeanRetentionService;
import com.linkedin.metadata.entity.ebean.batch.ChangeItemImpl;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import io.ebean.Database;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Autowired;
@@ -16,10 +15,8 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.context.annotation.PropertySource;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class RetentionServiceFactory {
@Autowired
@@ -41,10 +38,11 @@ public class RetentionServiceFactory {
}
@Bean(name = "retentionService")
@DependsOn({"ebeanServer", "entityService"})
@DependsOn("entityService")
@ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true)
@Nonnull
protected RetentionService<ChangeItemImpl> createEbeanInstance(Database server) {
protected RetentionService<ChangeItemImpl> createEbeanInstance(
@Qualifier("ebeanServer") final Database server) {
RetentionService<ChangeItemImpl> retentionService =
new EbeanRetentionService<>(_entityService, server, _batchSize);
_entityService.setRetentionService(retentionService);

View File

@@ -2,14 +2,11 @@ package com.linkedin.gms.factory.entityclient;
import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class EntityClientConfigFactory {
@Bean

View File

@@ -13,18 +13,15 @@ import com.linkedin.metadata.search.LineageSearchService;
import com.linkedin.metadata.search.SearchService;
import com.linkedin.metadata.search.client.CachingEntitySearchService;
import com.linkedin.metadata.service.RollbackService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import com.linkedin.metadata.timeseries.TimeseriesAspectService;
import javax.inject.Singleton;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
/** The *Java* Entity Client should be preferred if executing within the GMS service. */
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@ConditionalOnProperty(name = "entityClient.impl", havingValue = "java", matchIfMissing = true)
public class JavaEntityClientFactory {

View File

@@ -6,7 +6,6 @@ import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.entity.client.SystemRestliEntityClient;
import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig;
import com.linkedin.metadata.restli.DefaultRestliClientFactory;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import com.linkedin.parseq.retry.backoff.ExponentialBackoff;
import com.linkedin.restli.client.Client;
import java.net.URI;
@@ -15,11 +14,9 @@ import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
/** The Java Entity Client should be preferred if executing within the GMS service. */
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@ConditionalOnProperty(name = "entityClient.impl", havingValue = "restli")
public class RestliEntityClientFactory {

View File

@@ -2,17 +2,14 @@ package com.linkedin.gms.factory.entityregistry;
import com.linkedin.metadata.models.registry.ConfigEntityRegistry;
import com.linkedin.metadata.models.registry.EntityRegistryException;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import java.io.IOException;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.io.Resource;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class ConfigEntityRegistryFactory {
@Value("${configEntityRegistry.path}")

View File

@@ -2,16 +2,13 @@ package com.linkedin.gms.factory.ermodelrelation;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.metadata.service.ERModelRelationshipService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class ERModelRelationshipServiceFactory {
@Bean(name = "erModelRelationshipService")
@Scope("singleton")

View File

@@ -2,15 +2,12 @@ package com.linkedin.gms.factory.form;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.metadata.service.FormService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class FormServiceFactory {
@Bean(name = "formService")
@Scope("singleton")

View File

@@ -3,16 +3,13 @@ package com.linkedin.gms.factory.incident;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.gms.factory.auth.SystemAuthenticationFactory;
import com.linkedin.metadata.service.IncidentService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@Import({SystemAuthenticationFactory.class})
public class IncidentServiceFactory {
@Bean(name = "incidentService")

View File

@@ -4,7 +4,6 @@ import com.datahub.metadata.ingestion.IngestionScheduler;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.gms.factory.auth.SystemAuthenticationFactory;
import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import io.datahubproject.metadata.context.OperationContext;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Autowired;
@@ -12,11 +11,9 @@ import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Import({SystemAuthenticationFactory.class})
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class IngestionSchedulerFactory {
@Autowired

View File

@@ -1,9 +1,8 @@
package com.linkedin.gms.factory.kafka;
import com.linkedin.gms.factory.common.TopicConventionFactory;
import com.linkedin.gms.factory.kafka.common.TopicConventionFactory;
import com.linkedin.metadata.dao.producer.KafkaEventProducer;
import com.linkedin.metadata.dao.producer.KafkaHealthChecker;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import com.linkedin.mxe.TopicConvention;
import org.apache.avro.generic.IndexedRecord;
import org.apache.kafka.clients.producer.Producer;
@@ -11,12 +10,8 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@Import({DataHubKafkaProducerFactory.class, TopicConventionFactory.class, KafkaHealthChecker.class})
public class DataHubKafkaEventProducerFactory {
@Autowired

View File

@@ -1,12 +1,8 @@
package com.linkedin.gms.factory.kafka;
import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.gms.factory.kafka.schemaregistry.AwsGlueSchemaRegistryFactory;
import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory;
import com.linkedin.gms.factory.kafka.schemaregistry.KafkaSchemaRegistryFactory;
import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig;
import com.linkedin.metadata.config.kafka.KafkaConfiguration;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import java.util.Arrays;
import java.util.Map;
import org.apache.avro.generic.IndexedRecord;
@@ -14,36 +10,24 @@ import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.DependsOn;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@EnableConfigurationProperties({KafkaProperties.class})
@Import({
KafkaSchemaRegistryFactory.class,
AwsGlueSchemaRegistryFactory.class,
InternalSchemaRegistryFactory.class
})
@DependsOn("configurationProvider")
public class DataHubKafkaProducerFactory {
@Autowired
@Qualifier("schemaRegistryConfig")
private SchemaRegistryConfig _schemaRegistryConfig;
@Bean(name = "kafkaProducer")
protected Producer<String, IndexedRecord> createInstance(
@Qualifier("configurationProvider") ConfigurationProvider provider,
KafkaProperties properties) {
final KafkaProperties properties,
@Qualifier("schemaRegistryConfig") final SchemaRegistryConfig schemaRegistryConfig) {
KafkaConfiguration kafkaConfiguration = provider.getKafka();
return new KafkaProducer<>(
buildProducerProperties(_schemaRegistryConfig, kafkaConfiguration, properties));
buildProducerProperties(schemaRegistryConfig, kafkaConfiguration, properties));
}
public static Map<String, Object> buildProducerProperties(

View File

@@ -1,9 +1,6 @@
package com.linkedin.gms.factory.kafka;
import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.gms.factory.kafka.schemaregistry.AwsGlueSchemaRegistryFactory;
import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory;
import com.linkedin.gms.factory.kafka.schemaregistry.KafkaSchemaRegistryFactory;
import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig;
import com.linkedin.metadata.config.kafka.KafkaConfiguration;
import java.time.Duration;
@@ -17,7 +14,6 @@ import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
@@ -29,11 +25,6 @@ import org.springframework.kafka.support.serializer.ErrorHandlingDeserializer;
@Slf4j
@Configuration
@Import({
KafkaSchemaRegistryFactory.class,
AwsGlueSchemaRegistryFactory.class,
InternalSchemaRegistryFactory.class
})
public class KafkaEventConsumerFactory {
private int kafkaEventConsumerConcurrency;

View File

@@ -11,16 +11,16 @@ import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
@Slf4j
@Configuration
@EnableConfigurationProperties({KafkaProperties.class})
@DependsOn("configurationProvider")
public class SimpleKafkaConsumerFactory {
@Bean(name = "simpleKafkaConsumer")

View File

@@ -1,4 +1,4 @@
package com.linkedin.gms.factory.common;
package com.linkedin.gms.factory.kafka.common;
import com.linkedin.mxe.TopicConvention;
import com.linkedin.mxe.TopicConventionImpl;

View File

@@ -5,7 +5,6 @@ import com.amazonaws.services.schemaregistry.serializers.GlueSchemaRegistryKafka
import com.amazonaws.services.schemaregistry.utils.AWSSchemaRegistryConstants;
import com.amazonaws.services.schemaregistry.utils.AvroRecordType;
import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
@@ -15,11 +14,9 @@ import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
@Slf4j
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@ConditionalOnProperty(
name = "kafka.schemaRegistry.type",
havingValue = AwsGlueSchemaRegistryFactory.TYPE)

View File

@@ -1,7 +1,6 @@
package com.linkedin.gms.factory.kafka.schemaregistry;
import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClientConfig;
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
import io.confluent.kafka.serializers.KafkaAvroDeserializer;
@@ -16,14 +15,10 @@ import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
@Slf4j
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@ConditionalOnProperty(
name = "kafka.schemaRegistry.type",
havingValue = KafkaSchemaRegistryFactory.TYPE)
@ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = "KAFKA")
public class KafkaSchemaRegistryFactory {
public static final String TYPE = "KAFKA";

View File

@@ -1,6 +1,6 @@
package com.linkedin.gms.factory.kafka.schemaregistry;
import com.linkedin.gms.factory.common.TopicConventionFactory;
import com.linkedin.gms.factory.kafka.common.TopicConventionFactory;
import com.linkedin.metadata.registry.SchemaRegistryService;
import com.linkedin.metadata.registry.SchemaRegistryServiceImpl;
import com.linkedin.mxe.TopicConvention;

View File

@@ -2,16 +2,13 @@ package com.linkedin.gms.factory.lineage;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.metadata.service.LineageService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class LineageServiceFactory {
@Bean(name = "lineageService")

View File

@@ -2,15 +2,12 @@ package com.linkedin.gms.factory.ownership;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.metadata.service.OwnershipTypeService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class OwnershipTypeServiceFactory {
@Bean(name = "ownerShipTypeService")

View File

@@ -2,15 +2,12 @@ package com.linkedin.gms.factory.query;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.metadata.service.QueryService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class QueryServiceFactory {
@Bean(name = "queryService")

View File

@@ -1,8 +1,6 @@
package com.linkedin.gms.factory.recommendation.candidatesource;
import com.linkedin.gms.factory.common.IndexConventionFactory;
import com.linkedin.gms.factory.common.RestHighLevelClientFactory;
import com.linkedin.gms.factory.entity.EntityServiceFactory;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.recommendation.candidatesource.RecentlyViewedSource;
import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
@@ -12,14 +10,8 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@Configuration
@Import({
RestHighLevelClientFactory.class,
IndexConventionFactory.class,
EntityServiceFactory.class
})
public class RecentlyViewedCandidateSourceFactory {
@Autowired
@Qualifier("elasticSearchRestHighLevelClient")

View File

@@ -4,7 +4,6 @@ import com.linkedin.gms.factory.common.IndexConventionFactory;
import com.linkedin.gms.factory.common.RestHighLevelClientFactory;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
import javax.annotation.Nonnull;
import org.opensearch.client.RestHighLevelClient;
@@ -14,7 +13,6 @@ import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
/** Factory for components required for any services using elasticsearch */
@Configuration
@@ -24,7 +22,6 @@ import org.springframework.context.annotation.PropertySource;
ElasticSearchBulkProcessorFactory.class,
ElasticSearchIndexBuilderFactory.class
})
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class BaseElasticSearchComponentsFactory {
@lombok.Value
public static class BaseElasticSearchComponents {

View File

@@ -2,7 +2,6 @@ package com.linkedin.gms.factory.search;
import com.linkedin.metadata.search.EntitySearchService;
import com.linkedin.metadata.search.client.CachingEntitySearchService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
@@ -11,10 +10,8 @@ import org.springframework.cache.CacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.PropertySource;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class CachingEntitySearchServiceFactory {
@Autowired

View File

@@ -2,7 +2,6 @@ package com.linkedin.gms.factory.search;
import com.linkedin.gms.factory.common.RestHighLevelClientFactory;
import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;
import org.opensearch.action.support.WriteRequest;
@@ -13,12 +12,10 @@ import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
@Slf4j
@Configuration
@Import({RestHighLevelClientFactory.class})
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class ElasticSearchBulkProcessorFactory {
@Autowired
@Qualifier("elasticSearchRestHighLevelClient")

View File

@@ -9,7 +9,6 @@ import com.linkedin.gms.factory.common.IndexConventionFactory;
import com.linkedin.gms.factory.common.RestHighLevelClientFactory;
import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
import com.linkedin.metadata.version.GitVersion;
import jakarta.annotation.Nonnull;
@@ -25,11 +24,9 @@ import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
@Configuration
@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, GitVersionFactory.class})
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class ElasticSearchIndexBuilderFactory {
@Autowired

View File

@@ -16,7 +16,6 @@ import com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder;
import com.linkedin.metadata.search.elasticsearch.query.ESBrowseDAO;
import com.linkedin.metadata.search.elasticsearch.query.ESSearchDAO;
import com.linkedin.metadata.search.elasticsearch.update.ESWriteDAO;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import java.io.IOException;
import javax.annotation.Nonnull;
import lombok.extern.slf4j.Slf4j;
@@ -25,11 +24,9 @@ import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
@Slf4j
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
@Import({EntityRegistryFactory.class, SettingsBuilderFactory.class})
public class ElasticSearchServiceFactory {
private static final ObjectMapper YAML_MAPPER = new YAMLMapper();

View File

@@ -3,15 +3,12 @@ package com.linkedin.gms.factory.search;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class EntityIndexBuildersFactory {
@Autowired

View File

@@ -5,18 +5,15 @@ import com.linkedin.gms.factory.config.ConfigurationProvider;
import com.linkedin.metadata.graph.GraphService;
import com.linkedin.metadata.search.LineageSearchService;
import com.linkedin.metadata.search.SearchService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.cache.CacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.PropertySource;
@Configuration
@Import({GraphServiceFactory.class})
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class LineageSearchServiceFactory {
public static final String LINEAGE_SEARCH_SERVICE_CACHE_NAME = "relationshipSearchService";

View File

@@ -1,14 +1,11 @@
package com.linkedin.gms.factory.search;
import com.linkedin.metadata.search.transformer.SearchDocumentTransformer;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class SearchDocumentTransformerFactory {
@Value("${elasticsearch.index.maxArrayLength}")
private int maxArrayLength;

View File

@@ -7,17 +7,14 @@ import com.linkedin.metadata.search.SearchService;
import com.linkedin.metadata.search.cache.EntityDocCountCache;
import com.linkedin.metadata.search.client.CachingEntitySearchService;
import com.linkedin.metadata.search.ranker.SearchRanker;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.PropertySource;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class SearchServiceFactory {
@Autowired

View File

@@ -3,18 +3,15 @@ package com.linkedin.gms.factory.search;
import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
@Configuration
@Import(EntityRegistryFactory.class)
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class SettingsBuilderFactory {
@Autowired
@Qualifier("entityRegistry")

View File

@@ -2,15 +2,12 @@ package com.linkedin.gms.factory.search.views;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.metadata.service.ViewService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class ViewServiceFactory {
@Bean(name = "viewService")

View File

@@ -2,15 +2,12 @@ package com.linkedin.gms.factory.settings;
import com.linkedin.entity.client.SystemEntityClient;
import com.linkedin.metadata.service.SettingsService;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import javax.annotation.Nonnull;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.annotation.Scope;
@Configuration
@PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
public class SettingsServiceFactory {
@Bean(name = "settingsService")
@Scope("singleton")

Some files were not shown because too many files have changed in this diff.