feat(build): gradle 8, jdk17, neo4j 5 (#9458)
commit 824df5a6a3 (parent 6a16935728)
.github/workflows/airflow-plugin.yml | 5

@@ -49,6 +49,11 @@ jobs:
             extra_pip_extras: plugin-v2
       fail-fast: false
     steps:
+      - name: Set up JDK 17
+        uses: actions/setup-java@v3
+        with:
+          distribution: "zulu"
+          java-version: 17
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
.github/workflows/build-and-test.yml | 4

@@ -37,11 +37,11 @@ jobs:
        with:
          timezoneLinux: ${{ matrix.timezone }}
      - uses: hsheth2/sane-checkout-action@v1
-      - name: Set up JDK 11
+      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          distribution: "zulu"
-          java-version: 11
+          java-version: 17
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
.github/workflows/check-datahub-jars.yml | 4

@@ -28,11 +28,11 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: hsheth2/sane-checkout-action@v1
-      - name: Set up JDK 11
+      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          distribution: "zulu"
-          java-version: 11
+          java-version: 17
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
.github/workflows/docker-unified.yml | 39

@@ -79,6 +79,11 @@ jobs:
    runs-on: ubuntu-latest
    needs: setup
    steps:
+      - name: Set up JDK 17
+        uses: actions/setup-java@v3
+        with:
+          distribution: "zulu"
+          java-version: 17
      - name: Check out the repo
        uses: hsheth2/sane-checkout-action@v1
      - name: Pre-build artifacts for docker image

@@ -135,6 +140,11 @@ jobs:
    runs-on: ubuntu-latest
    needs: setup
    steps:
+      - name: Set up JDK 17
+        uses: actions/setup-java@v3
+        with:
+          distribution: "zulu"
+          java-version: 17
      - name: Check out the repo
        uses: hsheth2/sane-checkout-action@v1
      - name: Pre-build artifacts for docker image

@@ -191,6 +201,11 @@ jobs:
    runs-on: ubuntu-latest
    needs: setup
    steps:
+      - name: Set up JDK 17
+        uses: actions/setup-java@v3
+        with:
+          distribution: "zulu"
+          java-version: 17
      - name: Check out the repo
        uses: hsheth2/sane-checkout-action@v1
      - name: Pre-build artifacts for docker image

@@ -247,6 +262,11 @@ jobs:
    runs-on: ubuntu-latest
    needs: setup
    steps:
+      - name: Set up JDK 17
+        uses: actions/setup-java@v3
+        with:
+          distribution: "zulu"
+          java-version: 17
      - name: Check out the repo
        uses: hsheth2/sane-checkout-action@v1
      - name: Pre-build artifacts for docker image

@@ -303,6 +323,11 @@ jobs:
    runs-on: ubuntu-latest
    needs: setup
    steps:
+      - name: Set up JDK 17
+        uses: actions/setup-java@v3
+        with:
+          distribution: "zulu"
+          java-version: 17
      - name: Check out the repo
        uses: hsheth2/sane-checkout-action@v1
      - name: Pre-build artifacts for docker image

@@ -537,6 +562,11 @@ jobs:
      needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }}
    needs: [setup, datahub_ingestion_base_slim_build]
    steps:
+      - name: Set up JDK 17
+        uses: actions/setup-java@v3
+        with:
+          distribution: "zulu"
+          java-version: 17
      - name: Check out the repo
        uses: hsheth2/sane-checkout-action@v1
      - uses: dorny/paths-filter@v2

@@ -618,6 +648,11 @@ jobs:
      needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }}
    needs: [setup, datahub_ingestion_base_full_build]
    steps:
+      - name: Set up JDK 17
+        uses: actions/setup-java@v3
+        with:
+          distribution: "zulu"
+          java-version: 17
      - name: Check out the repo
        uses: hsheth2/sane-checkout-action@v1
      - uses: dorny/paths-filter@v2

@@ -720,11 +755,11 @@ jobs:
        run: df -h . && docker images
      - name: Check out the repo
        uses: actions/checkout@v3
-      - name: Set up JDK 11
+      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          distribution: "zulu"
-          java-version: 11
+          java-version: 17
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
.github/workflows/documentation.yml | 4

@@ -27,11 +27,11 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
-      - name: Set up JDK 11
+      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          distribution: "zulu"
-          java-version: 11
+          java-version: 17
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
.github/workflows/metadata-ingestion.yml | 5

@@ -44,6 +44,11 @@ jobs:
          - python-version: "3.10"
      fail-fast: false
    steps:
+      - name: Set up JDK 17
+        uses: actions/setup-java@v3
+        with:
+          distribution: "zulu"
+          java-version: 17
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
.github/workflows/metadata-io.yml | 4

@@ -29,11 +29,11 @@ jobs:
    timeout-minutes: 60
    steps:
      - uses: actions/checkout@v3
-      - name: Set up JDK 11
+      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          distribution: "zulu"
-          java-version: 11
+          java-version: 17
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
.github/workflows/metadata-model.yml | 5

@@ -29,6 +29,11 @@ jobs:
    runs-on: ubuntu-latest
    needs: setup
    steps:
+      - name: Set up JDK 17
+        uses: actions/setup-java@v3
+        with:
+          distribution: "zulu"
+          java-version: 17
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
.github/workflows/publish-datahub-jars.yml | 4

@@ -49,11 +49,11 @@ jobs:
    if: ${{ needs.check-secret.outputs.publish-enabled == 'true' }}
    steps:
      - uses: hsheth2/sane-checkout-action@v1
-      - name: Set up JDK 11
+      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          distribution: "zulu"
-          java-version: 11
+          java-version: 17
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
.github/workflows/spark-smoke-test.yml | 4

@@ -30,11 +30,11 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: hsheth2/sane-checkout-action@v1
-      - name: Set up JDK 11
+      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          distribution: "zulu"
-          java-version: 11
+          java-version: 17
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
build.gradle | 137

@@ -1,17 +1,20 @@
 buildscript {
+  ext.jdkVersion = 17
+  ext.javaClassVersion = 11
+
   ext.junitJupiterVersion = '5.6.1'
   // Releases: https://github.com/linkedin/rest.li/blob/master/CHANGELOG.md
-  ext.pegasusVersion = '29.46.8'
+  ext.pegasusVersion = '29.48.4'
   ext.mavenVersion = '3.6.3'
   ext.springVersion = '5.3.29'
   ext.springBootVersion = '2.7.14'
   ext.openTelemetryVersion = '1.18.0'
-  ext.neo4jVersion = '4.4.9'
-  ext.neo4jTestVersion = '4.4.25'
-  ext.neo4jApocVersion = '4.4.0.20:all'
+  ext.neo4jVersion = '5.14.0'
+  ext.neo4jTestVersion = '5.14.0'
+  ext.neo4jApocVersion = '5.14.0'
   ext.testContainersVersion = '1.17.4'
   ext.elasticsearchVersion = '2.9.0' // ES 7.10, Opensearch 1.x, 2.x
-  ext.jacksonVersion = '2.15.2'
+  ext.jacksonVersion = '2.15.3'
   ext.jettyVersion = '9.4.46.v20220331'
   ext.playVersion = '2.8.18'
   ext.log4jVersion = '2.19.0'

@@ -29,19 +32,19 @@ buildscript {
   buildscript.repositories.addAll(project.repositories)
   dependencies {
     classpath 'com.linkedin.pegasus:gradle-plugins:' + pegasusVersion
-    classpath 'com.github.node-gradle:gradle-node-plugin:2.2.4'
+    classpath 'com.github.node-gradle:gradle-node-plugin:7.0.1'
     classpath 'io.acryl.gradle.plugin:gradle-avro-plugin:0.2.0'
     classpath 'org.springframework.boot:spring-boot-gradle-plugin:' + springBootVersion
     classpath "io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.30.0"
     classpath "com.palantir.gradle.gitversion:gradle-git-version:3.0.0"
     classpath "org.gradle.playframework:gradle-playframework:0.14"
-    classpath "gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:2.19.1"
+    classpath "gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:2.19.2"
   }
 }

 plugins {
-  id 'com.gorylenko.gradle-git-properties' version '2.4.0-rc2'
-  id 'com.github.johnrengelman.shadow' version '6.1.0'
+  id 'com.gorylenko.gradle-git-properties' version '2.4.1'
+  id 'com.github.johnrengelman.shadow' version '8.1.1' apply false
   id 'com.palantir.docker' version '0.35.0' apply false
   id "com.diffplug.spotless" version "6.23.3"
   // https://blog.ltgt.net/javax-jakarta-mess-and-gradle-solution/

@@ -149,19 +152,20 @@ project.ext.externalDependency = [
   'log4jApi': "org.apache.logging.log4j:log4j-api:$log4jVersion",
   'log4j12Api': "org.slf4j:log4j-over-slf4j:$slf4jVersion",
   'log4j2Api': "org.apache.logging.log4j:log4j-to-slf4j:$log4jVersion",
-  'lombok': 'org.projectlombok:lombok:1.18.16',
+  'lombok': 'org.projectlombok:lombok:1.18.30',
   'mariadbConnector': 'org.mariadb.jdbc:mariadb-java-client:2.6.0',
   'mavenArtifact': "org.apache.maven:maven-artifact:$mavenVersion",
   'mixpanel': 'com.mixpanel:mixpanel-java:1.4.4',
-  'mockito': 'org.mockito:mockito-core:3.0.0',
-  'mockitoInline': 'org.mockito:mockito-inline:3.0.0',
+  'mockito': 'org.mockito:mockito-core:4.11.0',
+  'mockitoInline': 'org.mockito:mockito-inline:4.11.0',
   'mockServer': 'org.mock-server:mockserver-netty:5.11.2',
   'mockServerClient': 'org.mock-server:mockserver-client-java:5.11.2',
   'mysqlConnector': 'mysql:mysql-connector-java:8.0.20',
   'neo4jHarness': 'org.neo4j.test:neo4j-harness:' + neo4jTestVersion,
   'neo4jJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jVersion,
+  'neo4jTestJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jTestVersion,
-  'neo4jApoc': 'org.neo4j.procedure:apoc:' + neo4jApocVersion,
+  'neo4jApocCore': 'org.neo4j.procedure:apoc-core:' + neo4jApocVersion,
+  'neo4jApocCommon': 'org.neo4j.procedure:apoc-common:' + neo4jApocVersion,
   'opentelemetryApi': 'io.opentelemetry:opentelemetry-api:' + openTelemetryVersion,
   'opentelemetryAnnotations': 'io.opentelemetry:opentelemetry-extension-annotations:' + openTelemetryVersion,
   'opentracingJdbc':'io.opentracing.contrib:opentracing-jdbc:0.2.15',

@@ -190,8 +194,8 @@ project.ext.externalDependency = [
   'servletApi': 'javax.servlet:javax.servlet-api:3.1.0',
   'shiroCore': 'org.apache.shiro:shiro-core:1.11.0',
   'snakeYaml': 'org.yaml:snakeyaml:2.0',
-  'sparkSql' : 'org.apache.spark:spark-sql_2.11:2.4.8',
-  'sparkHive' : 'org.apache.spark:spark-hive_2.11:2.4.8',
+  'sparkSql' : 'org.apache.spark:spark-sql_2.12:3.0.3',
+  'sparkHive' : 'org.apache.spark:spark-hive_2.12:3.0.3',
   'springBeans': "org.springframework:spring-beans:$springVersion",
   'springContext': "org.springframework:spring-context:$springVersion",
   'springCore': "org.springframework:spring-core:$springVersion",

@@ -210,7 +214,6 @@ project.ext.externalDependency = [
   'springActuator': "org.springframework.boot:spring-boot-starter-actuator:$springBootVersion",
   'swaggerAnnotations': 'io.swagger.core.v3:swagger-annotations:2.2.15',
   'swaggerCli': 'io.swagger.codegen.v3:swagger-codegen-cli:3.0.46',
-  'testngJava8': 'org.testng:testng:7.5.1',
   'testng': 'org.testng:testng:7.8.0',
   'testContainers': 'org.testcontainers:testcontainers:' + testContainersVersion,
   'testContainersJunit': 'org.testcontainers:junit-jupiter:' + testContainersVersion,

@@ -226,13 +229,69 @@ project.ext.externalDependency = [
   'charle': 'com.charleskorn.kaml:kaml:0.53.0',
   'common': 'commons-io:commons-io:2.7',
   'jline':'jline:jline:1.4.1',
-  'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0'
+  'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0',
+  'annotationApi': 'javax.annotation:javax.annotation-api:1.3.2'
 ]

 allprojects {
   apply plugin: 'idea'
   apply plugin: 'eclipse'
   // apply plugin: 'org.gradlex.java-ecosystem-capabilities'
+
+  tasks.withType(Test).configureEach {
+    // https://docs.gradle.org/current/userguide/performance.html
+    maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1
+
+    if (project.configurations.getByName("testImplementation").getDependencies()
+            .any{ it.getName().contains("testng") }) {
+      useTestNG()
+    }
+  }
+
+  if (project.plugins.hasPlugin('java')
+          || project.plugins.hasPlugin('java-library')
+          || project.plugins.hasPlugin('application')
+          || project.plugins.hasPlugin('pegasus')) {
+
+    java {
+      toolchain {
+        languageVersion = JavaLanguageVersion.of(jdkVersion)
+      }
+    }
+
+    compileJava {
+      options.release = javaClassVersion
+    }
+    tasks.withType(JavaCompile).configureEach {
+      javaCompiler = javaToolchains.compilerFor {
+        languageVersion = JavaLanguageVersion.of(jdkVersion)
+      }
+    }
+
+    tasks.withType(JavaExec).configureEach {
+      javaLauncher = javaToolchains.launcherFor {
+        languageVersion = JavaLanguageVersion.of(jdkVersion)
+      }
+    }
+
+    // not duplicated, need to set this outside and inside afterEvaluate
+    afterEvaluate {
+      compileJava {
+        options.release = javaClassVersion
+      }
+      tasks.withType(JavaCompile).configureEach {
+        javaCompiler = javaToolchains.compilerFor {
+          languageVersion = JavaLanguageVersion.of(jdkVersion)
+        }
+      }
+
+      tasks.withType(JavaExec).configureEach {
+        javaLauncher = javaToolchains.launcherFor {
+          languageVersion = JavaLanguageVersion.of(jdkVersion)
+        }
+      }
+    }
+  }
 }

 configure(subprojects.findAll {! it.name.startsWith('spark-lineage')}) {

@@ -264,8 +323,9 @@ subprojects {
     failOnNoGitDirectory = false
   }

-  plugins.withType(JavaPlugin) {
+  plugins.withType(JavaPlugin).configureEach {
     dependencies {
+      implementation externalDependency.annotationApi
       constraints {
         implementation("com.google.googlejavaformat:google-java-format:$googleJavaFormatVersion")
         implementation('io.netty:netty-all:4.1.100.Final')

@@ -276,18 +336,30 @@ subprojects {
         implementation("com.fasterxml.jackson.core:jackson-dataformat-cbor:$jacksonVersion")
       }
     }

     spotless {
       java {
         googleJavaFormat()
         target project.fileTree(project.projectDir) {
-          include '**/*.java'
-          exclude 'build/**/*.java'
-          exclude '**/generated/**/*.*'
-          exclude '**/mainGeneratedDataTemplate/**/*.*'
-          exclude '**/mainGeneratedRest/**/*.*'
+          include 'src/**/*.java'
+          exclude 'src/**/resources/'
+          exclude 'src/**/generated/'
+          exclude 'src/**/mainGeneratedDataTemplate/'
+          exclude 'src/**/mainGeneratedRest/'
+          exclude 'src/renamed/avro/'
+          exclude 'src/test/sample-test-plugins/'
         }
       }
     }

+    if (project.plugins.hasPlugin('pegasus')) {
+      dependencies {
+        dataTemplateCompile spec.product.pegasus.data
+        dataTemplateCompile externalDependency.annotationApi // support > jdk8
+        restClientCompile spec.product.pegasus.restliClient
+      }
+    }
+
     afterEvaluate {
       def spotlessJavaTask = tasks.findByName('spotlessJava')
       def processTask = tasks.findByName('processResources')

@@ -305,28 +377,11 @@ subprojects {
       }
     }

-  tasks.withType(JavaCompile).configureEach {
-    javaCompiler = javaToolchains.compilerFor {
-      languageVersion = JavaLanguageVersion.of(11)
-    }
-  }
-  tasks.withType(Test).configureEach {
-    javaLauncher = javaToolchains.launcherFor {
-      languageVersion = JavaLanguageVersion.of(11)
-    }
-    // https://docs.gradle.org/current/userguide/performance.html
-    maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1
-
-    if (project.configurations.getByName("testImplementation").getDependencies()
-            .any{ it.getName().contains("testng") }) {
-      useTestNG()
-    }
-  }

   afterEvaluate {
-    if (project.plugins.hasPlugin('pegasus')) {
-      dependencies {
-        dataTemplateCompile spec.product.pegasus.data
-        dataTemplateCompile externalDependency.annotationApi // support > jdk8
-        restClientCompile spec.product.pegasus.restliClient
-      }
-    }
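The root build.gradle hunks above are the heart of the migration: Gradle 8 runs the build on a JDK 17 toolchain while `options.release` keeps the emitted class files compatible with a Java 11 runtime. A minimal standalone sketch of that cross-compilation pattern (the property names mirror the `ext.jdkVersion` / `ext.javaClassVersion` values above; everything else is illustrative):

```groovy
// Minimal sketch: compile and test on JDK 17, emit Java 11 class files.
plugins {
  id 'java'
}

ext.jdkVersion = 17        // toolchain that runs javac and tests
ext.javaClassVersion = 11  // class-file / API target for runtime compatibility

java {
  toolchain {
    languageVersion = JavaLanguageVersion.of(jdkVersion)
  }
}

tasks.withType(JavaCompile).configureEach {
  // --release validates API usage against the Java 11 platform,
  // not just the bytecode level, so JDK-17-only APIs fail the build.
  options.release = javaClassVersion
}
```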
@@ -1,9 +1,11 @@
-apply plugin: 'java'
-
 buildscript {
   apply from: '../repositories.gradle'
 }

+plugins {
+  id 'java'
+}
+
 dependencies {
   /**
    * Forked version of abandoned repository: https://github.com/fge/json-schema-avro

@@ -21,6 +23,9 @@ dependencies {
   implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.13.5'
   implementation 'commons-io:commons-io:2.11.0'

-  compileOnly 'org.projectlombok:lombok:1.18.14'
-  annotationProcessor 'org.projectlombok:lombok:1.18.14'
+  compileOnly 'org.projectlombok:lombok:1.18.30'
+  annotationProcessor 'org.projectlombok:lombok:1.18.30'
+
+  // pegasus dependency, overrides for tasks
+  implementation 'com.linkedin.pegasus:gradle-plugins:29.48.4'
 }
(File diff suppressed because it is too large.)
@@ -0,0 +1,124 @@
+package com.linkedin.pegasus.gradle.tasks;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+import org.gradle.api.DefaultTask;
+import org.gradle.api.file.FileCollection;
+import org.gradle.api.specs.Specs;
+import org.gradle.api.tasks.InputFiles;
+import org.gradle.api.tasks.Internal;
+import org.gradle.api.tasks.SkipWhenEmpty;
+import org.gradle.api.tasks.TaskAction;
+import org.gradle.work.FileChange;
+import org.gradle.work.InputChanges;
+
+
+public class ChangedFileReportTask extends DefaultTask
+{
+  private final Collection<String> _needCheckinFiles = new ArrayList<>();
+
+  private FileCollection _idlFiles = getProject().files();
+  private FileCollection _snapshotFiles = getProject().files();
+
+  public ChangedFileReportTask()
+  {
+    //with Gradle 6.0, Declaring an incremental task without outputs is not allowed.
+    getOutputs().upToDateWhen(Specs.satisfyNone());
+  }
+
+  // DataHub Note - updated for InputChanges
+  @TaskAction
+  public void checkFilesForChanges(InputChanges inputChanges)
+  {
+    getLogger().lifecycle("Checking idl and snapshot files for changes...");
+    getLogger().info("idlFiles: " + _idlFiles.getAsPath());
+    getLogger().info("snapshotFiles: " + _snapshotFiles.getAsPath());
+
+    Set<String> filesRemoved = new HashSet<>();
+    Set<String> filesAdded = new HashSet<>();
+    Set<String> filesChanged = new HashSet<>();
+
+    if (inputChanges.isIncremental())
+    {
+      Consumer<FileChange> handleChange = change ->
+      {
+        switch (change.getChangeType()) {
+          case ADDED:
+            filesAdded.add(change.getFile().getAbsolutePath());
+            break;
+          case REMOVED:
+            filesRemoved.add(change.getFile().getAbsolutePath());
+            break;
+          case MODIFIED:
+            filesChanged.add(change.getFile().getAbsolutePath());
+            break;
+        }
+      };
+
+      inputChanges.getFileChanges(_idlFiles).forEach(handleChange);
+      inputChanges.getFileChanges(_snapshotFiles).forEach(handleChange);
+
+      if (!filesRemoved.isEmpty())
+      {
+        String files = joinByComma(filesRemoved);
+        _needCheckinFiles.add(files);
+        getLogger().lifecycle(
+            "The following files have been removed, be sure to remove them from source control: {}", files);
+      }
+
+      if (!filesAdded.isEmpty())
+      {
+        String files = joinByComma(filesAdded);
+        _needCheckinFiles.add(files);
+        getLogger().lifecycle("The following files have been added, be sure to add them to source control: {}", files);
+      }
+
+      if (!filesChanged.isEmpty())
+      {
+        String files = joinByComma(filesChanged);
+        _needCheckinFiles.add(files);
+        getLogger().lifecycle(
+            "The following files have been changed, be sure to commit the changes to source control: {}", files);
+      }
+    }
+  }
+
+  private String joinByComma(Set<String> files)
+  {
+    return files.stream().collect(Collectors.joining(", "));
+  }
+
+  @InputFiles
+  @SkipWhenEmpty
+  public FileCollection getSnapshotFiles()
+  {
+    return _snapshotFiles;
+  }
+
+  public void setSnapshotFiles(FileCollection snapshotFiles)
+  {
+    _snapshotFiles = snapshotFiles;
+  }
+
+  @InputFiles
+  @SkipWhenEmpty
+  public FileCollection getIdlFiles()
+  {
+    return _idlFiles;
+  }
+
+  public void setIdlFiles(FileCollection idlFiles)
+  {
+    _idlFiles = idlFiles;
+  }
+
+  @Internal
+  public Collection<String> getNeedCheckinFiles()
+  {
+    return _needCheckinFiles;
+  }
+}
@@ -2,6 +2,7 @@ plugins {
   id "io.github.kobylynskyi.graphql.codegen" version "4.1.1"
   id 'scala'
   id 'com.palantir.docker'
+  id 'org.gradle.playframework'
 }

 apply from: "../gradle/versioning/versioning.gradle"

@@ -20,7 +21,6 @@ model {
 }

 task myTar(type: Tar) {
-  extension = "tgz"
   compression = Compression.GZIP

   from("${buildDir}/stage")

@@ -119,3 +119,23 @@ task cleanLocalDockerImages {
   }
 }
 dockerClean.finalizedBy(cleanLocalDockerImages)
+
+// gradle 8 fixes
+tasks.getByName('createDatahub-frontendTarDist').dependsOn 'stageMainDist'
+tasks.getByName('createDatahub-frontendZipDist').dependsOn 'stageMainDist'
+stagePlayBinaryDist.dependsOn tasks.getByName('createDatahub-frontendStartScripts')
+playBinaryDistTar.dependsOn tasks.getByName('createDatahub-frontendStartScripts')
+playBinaryDistZip.dependsOn tasks.getByName('createDatahub-frontendStartScripts')
+tasks.getByName('stageDatahub-frontendDist').dependsOn stagePlayBinaryDist
+tasks.getByName('stageDatahub-frontendDist').dependsOn createPlayBinaryStartScripts
+tasks.getByName('datahub-frontendDistTar').dependsOn createPlayBinaryStartScripts
+tasks.getByName('datahub-frontendDistTar').dependsOn createMainStartScripts
+tasks.getByName('datahub-frontendDistZip').dependsOn createPlayBinaryStartScripts
+tasks.getByName('datahub-frontendDistZip').dependsOn createMainStartScripts
+playBinaryDistTar.dependsOn createMainStartScripts
+playBinaryDistZip.dependsOn createMainStartScripts
+createMainStartScripts.dependsOn 'stageDatahub-frontendDist'
+createPlayBinaryTarDist.dependsOn 'stageDatahub-frontendDist'
+createPlayBinaryZipDist.dependsOn 'stageDatahub-frontendDist'
+createPlayBinaryTarDist.dependsOn 'stageMainDist'
+createPlayBinaryZipDist.dependsOn 'stageMainDist'
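The block of `dependsOn` wiring above appears to work around Gradle 8's stricter validation, which fails the build when one task consumes another task's outputs without a declared dependency. A hedged sketch of the general pattern (the `stageAssets` / `packageAssets` task names are hypothetical, not from this commit):

```groovy
// Hypothetical producer/consumer tasks, for illustration only.
tasks.register('stageAssets', Copy) {
  from 'src/assets'
  into layout.buildDirectory.dir('stage')
}

tasks.register('packageAssets', Zip) {
  // Wiring the producer in as an input declares the dependency implicitly...
  from tasks.named('stageAssets')
  archiveFileName = 'assets.zip'
  destinationDirectory = layout.buildDirectory.dir('dist')
}

// ...whereas tasks that merely share output paths must declare the ordering
// explicitly, which is what the datahub-frontend fixes above do:
tasks.named('packageAssets') { dependsOn 'stageAssets' }
```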
@@ -1,4 +1,3 @@
-apply plugin: "org.gradle.playframework"
-
 // Change this to listen on a different port
 project.ext.httpPort = 9001

@@ -101,4 +100,22 @@ play {

 test {
   useJUnitPlatform()
+
+  def playJava17CompatibleJvmArgs = [
+    "--add-opens=java.base/java.lang=ALL-UNNAMED",
+    //"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED",
+    //"--add-opens=java.base/java.lang.reflect=ALL-UNNAMED",
+    //"--add-opens=java.base/java.io=ALL-UNNAMED",
+    //"--add-opens=java.base/java.net=ALL-UNNAMED",
+    //"--add-opens=java.base/java.nio=ALL-UNNAMED",
+    "--add-opens=java.base/java.util=ALL-UNNAMED",
+    //"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED",
+    //"--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED",
+    //"--add-opens=java.base/sun.nio.ch=ALL-UNNAMED",
+    //"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED",
+    //"--add-opens=java.base/sun.security.action=ALL-UNNAMED",
+    //"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED",
+    //"--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED",
+  ]
+  jvmArgs = playJava17CompatibleJvmArgs
 }
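The `--add-opens` flags above are needed because Java 17 strongly encapsulates JDK internals (JEP 403), which Play and its test tooling still access reflectively. A minimal sketch of applying such flags to any Gradle `Test` task; the exact set of opened packages is application-specific and is best grown from the `InaccessibleObjectException` messages a run actually produces:

```groovy
// Open only what the code under test actually reflects into.
tasks.withType(Test).configureEach {
  jvmArgs '--add-opens=java.base/java.lang=ALL-UNNAMED',
          '--add-opens=java.base/java.util=ALL-UNNAMED'
}
```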
@@ -1,7 +1,8 @@
 plugins {
+  id 'java'
   id "io.github.kobylynskyi.graphql.codegen" version "4.1.1"
 }
-apply plugin: 'java'
+

 dependencies {
   implementation project(':metadata-service:restli-client')
@@ -1,8 +1,8 @@
 plugins {
   id 'java'
+  id 'distribution'
+  id 'com.github.node-gradle.node'
 }
-apply plugin: 'distribution'
-apply plugin: 'com.github.node-gradle.node'

 node {

@@ -35,7 +35,7 @@ node {
   yarnWorkDir = file("${project.projectDir}/.gradle/yarn")

   // Set the work directory where node_modules should be located
-  nodeModulesDir = file("${project.projectDir}")
+  nodeProjectDir = file("${project.projectDir}")

 }

@@ -94,7 +94,7 @@ configurations {

 distZip {
   dependsOn yarnQuickBuild
-  baseName 'datahub-web-react'
+  archiveFileName = "datahub-web-react-${archiveVersion}.${archiveExtension}"
   from 'dist'
 }

@@ -112,5 +112,5 @@ jar {
   into('public') {
     from zipTree(distZip.outputs.files.first())
   }
-  classifier = 'assets'
+  archiveClassifier = 'assets'
 }
@@ -17,7 +17,7 @@ RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then
 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762
 RUN apk --no-cache --update-cache --available upgrade \
     && apk --no-cache add curl sqlite libc6-compat java-snappy \
-    && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \
+    && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \
     && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/

 ENV LD_LIBRARY_PATH="/lib:/lib64"

@@ -25,7 +25,10 @@ ENV LD_LIBRARY_PATH="/lib:/lib64"
 FROM base as prod-install

 COPY ./datahub-frontend.zip /
-RUN unzip datahub-frontend.zip && rm datahub-frontend.zip
+RUN unzip datahub-frontend.zip -d /datahub-frontend \
+    && mv /datahub-frontend/main/* /datahub-frontend \
+    && rmdir /datahub-frontend/main \
+    && rm datahub-frontend.zip
 COPY ./docker/monitoring/client-prometheus-config.yaml /datahub-frontend/
 RUN chown -R datahub:datahub /datahub-frontend && chmod 755 /datahub-frontend
@@ -49,6 +49,8 @@ export JAVA_OPTS="${JAVA_MEMORY_OPTS:-"-Xms512m -Xmx1024m"} \
    -Djava.security.auth.login.config=datahub-frontend/conf/jaas.conf \
    -Dlogback.configurationFile=datahub-frontend/conf/logback.xml \
    -Dlogback.debug=false \
+    --add-opens java.base/java.lang=ALL-UNNAMED \
+    --add-opens=java.base/java.util=ALL-UNNAMED \
    ${PROMETHEUS_AGENT:-} ${OTEL_AGENT:-} \
    ${TRUSTSTORE_FILE:-} ${TRUSTSTORE_TYPE:-} ${TRUSTSTORE_PASSWORD:-} \
    ${HTTP_PROXY:-} ${HTTPS_PROXY:-} ${NO_PROXY:-} \
|
||||
# PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762
|
||||
RUN apk --no-cache --update-cache --available upgrade \
|
||||
&& apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \
|
||||
&& apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \
|
||||
&& apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \
|
||||
&& apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \
|
||||
&& curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \
|
||||
&& curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \
|
||||
&& curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \
|
||||
&& wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \
|
||||
&& wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \
|
||||
&& cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks
|
||||
&& cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks
|
||||
COPY --from=binary /go/bin/dockerize /usr/local/bin
|
||||
|
||||
ENV LD_LIBRARY_PATH="/lib:/lib64"
|
||||
|
@@ -45,9 +45,9 @@ docker {

   buildArgs(dockerBuildArgs)
 }
-tasks.getByName('docker').dependsOn(['build',
-  ':docker:datahub-ingestion-base:docker',
-  ':metadata-ingestion:codegen'])
+tasks.getByName('dockerPrepare').dependsOn(['build',
+  ':docker:datahub-ingestion-base:docker',
+  ':metadata-ingestion:codegen'])

 task mkdirBuildDocker {
   doFirst {
@@ -38,11 +38,11 @@ ENV JMX_VERSION=0.18.0
 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762
 RUN apk --no-cache --update-cache --available upgrade \
     && apk --no-cache add curl bash coreutils sqlite libc6-compat java-snappy \
-    && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \
+    && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \
     && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \
     && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \
     && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \
-    && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks
+    && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks
 COPY --from=binary /go/bin/dockerize /usr/local/bin

 ENV LD_LIBRARY_PATH="/lib:/lib64"
@@ -38,11 +38,11 @@ ENV JMX_VERSION=0.18.0
 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762
 RUN apk --no-cache --update-cache --available upgrade \
     && apk --no-cache add curl bash sqlite libc6-compat java-snappy \
-    && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \
+    && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \
     && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \
     && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \
     && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \
-    && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks
+    && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks
 COPY --from=binary /go/bin/dockerize /usr/local/bin

 FROM base as prod-install
@@ -38,13 +38,13 @@ ENV JMX_VERSION=0.18.0
 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762
 RUN apk --no-cache --update-cache --available upgrade \
     && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \
-    && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \
+    && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \
     && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \
     && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \
     && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \
     && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \
     && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \
-    && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks
+    && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks
 COPY --from=binary /go/bin/dockerize /usr/local/bin

 ENV LD_LIBRARY_PATH="/lib:/lib64"
@@ -31,7 +31,7 @@ LABEL name="kafka" version=${KAFKA_VERSION}
 RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi

 RUN apk add --no-cache bash coreutils
-RUN apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community
+RUN apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community

 RUN apk add --no-cache -t .build-deps git curl ca-certificates jq gcc musl-dev libffi-dev zip
 RUN mkdir -p /opt \
@@ -1,5 +1,7 @@
-apply plugin: 'distribution'
-apply plugin: 'com.github.node-gradle.node'
+plugins {
+  id 'distribution'
+  id 'com.github.node-gradle.node'
+}

 node {

@@ -12,10 +14,10 @@ node {
   }

   // Version of node to use.
-  version = '16.16.0'
+  version = '21.2.0'

   // Version of Yarn to use.
-  yarnVersion = '1.22.0'
+  yarnVersion = '1.22.1'

   // Base URL for fetching node distributions (set nodeDistBaseUrl if you have a mirror).
   if (project.hasProperty('nodeDistBaseUrl')) {

@@ -31,7 +33,7 @@ node {
   yarnWorkDir = file("${project.projectDir}/.gradle/yarn")

   // Set the work directory where node_modules should be located
-  nodeModulesDir = file("${project.projectDir}")
+  nodeProjectDir = file("${project.projectDir}")

 }
 /*

@@ -122,7 +124,11 @@ task yarnBuild(type: YarnTask, dependsOn: [yarnLint, yarnGenerate, downloadHistoricalVersions]) {
   // See https://stackoverflow.com/questions/53230823/fatal-error-ineffective-mark-compacts-near-heap-limit-allocation-failed-java
   // and https://github.com/facebook/docusaurus/issues/8329.
   // TODO: As suggested in https://github.com/facebook/docusaurus/issues/4765, try switching to swc-loader.
-  environment = ['NODE_OPTIONS': '--max-old-space-size=10248']
+  if (project.hasProperty('useSystemNode') && project.getProperty('useSystemNode').toBoolean()) {
+    environment = ['NODE_OPTIONS': '--max-old-space-size=10248']
+  } else {
+    environment = ['NODE_OPTIONS': '--max-old-space-size=10248 --openssl-legacy-provider']
+  }
   args = ['run', 'build']

 }
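The `nodeModulesDir` → `nodeProjectDir` renames in this commit track the gradle-node-plugin 2.x → 7.x upgrade declared in the root build. A sketch of the plugin-7-style configuration, using the versions from this commit:

```groovy
plugins {
  id 'com.github.node-gradle.node' version '7.0.1'
}

node {
  version = '21.2.0'        // Node.js distribution to download
  yarnVersion = '1.22.1'
  download = true           // fetch Node rather than using a system install
  // plugin 7.x name; earlier plugin generations called this nodeModulesDir
  nodeProjectDir = file(project.projectDir)
}
```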
@@ -12,7 +12,7 @@ set -euxo pipefail

 yum groupinstall "Development Tools" -y
 yum erase openssl-devel -y
-yum install openssl11 openssl11-devel libffi-devel bzip2-devel wget -y
+yum install openssl11 openssl11-devel libffi-devel bzip2-devel wget nodejs -y

 wget https://www.python.org/ftp/python/3.10.11/Python-3.10.11.tgz
 tar -xf Python-3.10.11.tgz
@@ -6,16 +6,12 @@ title: "Local Development"

 ## Requirements

-- Both [Java 11 JDK](https://openjdk.org/projects/jdk/11/) and [Java 8 JDK](https://openjdk.java.net/projects/jdk8/)
+- [Java 17 JDK](https://openjdk.org/projects/jdk/17/)
 - [Python 3.10](https://www.python.org/downloads/release/python-3100/)
 - [Docker](https://www.docker.com/)
 - [Docker Compose](https://docs.docker.com/compose/)
 - Docker engine with at least 8GB of memory to run tests.

-:::caution
-
-Do not try to use a JDK newer than JDK 11. The build process does not currently work with newer JDKs versions.
-
-:::
-
 On macOS, these can be installed using [Homebrew](https://brew.sh/).

@@ -147,11 +143,11 @@ You're probably using a Java version that's too new for gradle. Run the following command:
 java --version
 ```

-While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 11](https://openjdk.org/projects/jdk/11/) (aka Java 11).
+While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 17](https://openjdk.org/projects/jdk/17/) (aka Java 17).

 #### Getting `cannot find symbol` error for `javax.annotation.Generated`

-Similar to the previous issue, please use Java 1.8 to build the project.
+Similar to the previous issue, please use Java 17 to build the project.
 You can install multiple version of Java on a single machine and switch between them using the `JAVA_HOME` environment variable. See [this document](https://docs.oracle.com/cd/E21454_01/html/821-2531/inst_jdk_javahome_t.html) for more details.

 #### `:metadata-models:generateDataTemplate` task fails with `java.nio.file.InvalidPathException: Illegal char <:> at index XX` or `Caused by: java.lang.IllegalArgumentException: 'other' has different root` error
@@ -7,11 +7,15 @@ This file documents any backwards-incompatible changes in DataHub and assists people when migrating to a new version.

 ### Breaking Changes

 - Updating MySQL version for quickstarts to 8.2, may cause quickstart issues for existing instances.
+- Neo4j 5.x, may require migration from 4.x
+- Build now requires JDK17 (Runtime Java 11)

 ### Potential Downtime

 ### Deprecations

+- Spark 2.x (including previous JDK8 build requirements)
+
 ### Other Notable Changes

 ## 0.12.1
@@ -10,11 +10,11 @@ You're probably using a Java version that's too new for gradle. Run the following command:
 java --version
 ```

-While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 11](https://openjdk.org/projects/jdk/11/) (aka Java 11).
+While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 17](https://openjdk.org/projects/jdk/17/) (aka Java 17).

 ## Getting `cannot find symbol` error for `javax.annotation.Generated`

-Similar to the previous issue, please use Java 1.8 to build the project.
+Similar to the previous issue, please use Java 17 to build the project.
 You can install multiple version of Java on a single machine and switch between them using the `JAVA_HOME` environment variable. See [this document](https://docs.oracle.com/cd/E21454_01/html/821-2531/inst_jdk_javahome_t.html) for more details.

 ## `:metadata-models:generateDataTemplate` task fails with `java.nio.file.InvalidPathException: Illegal char <:> at index XX` or `Caused by: java.lang.IllegalArgumentException: 'other' has different root` error
@@ -1,10 +1,13 @@
-apply plugin: 'pegasus'
-apply plugin: 'java-library'
+plugins {
+  id 'pegasus'
+  id 'java-library'
+}

 dependencies {
   implementation spec.product.pegasus.data
   implementation spec.product.pegasus.generator
   api project(path: ':metadata-models')
+  api project(path: ':metadata-models', configuration: "dataTemplate")
   implementation externalDependency.slf4jApi
   compileOnly externalDependency.lombok
   implementation externalDependency.guava
gradle/wrapper/gradle-wrapper.properties | 2

@@ -1,6 +1,6 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.2-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.0.2-bin.zip
 networkTimeout=10000
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
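The wrapper bump to Gradle 8.0.2 is normally regenerated rather than hand-edited. One way (a sketch) is to configure the built-in `wrapper` task and run `./gradlew wrapper` once, so the properties file and wrapper scripts stay in sync:

```groovy
// After editing, run `./gradlew wrapper`; Gradle rewrites
// gradle/wrapper/gradle-wrapper.properties and the wrapper scripts itself.
wrapper {
  gradleVersion = '8.0.2'
  distributionType = Wrapper.DistributionType.BIN
}
```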
@@ -1,16 +1,8 @@
-apply plugin: 'java-library'
-apply plugin: 'pegasus'
+plugins {
+  id 'java-library'
+  id 'pegasus'
+}

-tasks.withType(JavaCompile).configureEach {
-  javaCompiler = javaToolchains.compilerFor {
-    languageVersion = JavaLanguageVersion.of(8)
-  }
-}
-tasks.withType(Test).configureEach {
-  javaLauncher = javaToolchains.launcherFor {
-    languageVersion = JavaLanguageVersion.of(8)
-  }
-}

 dependencies {
   api spec.product.pegasus.data

@@ -28,7 +20,7 @@ dependencies {
   testImplementation externalDependency.commonsIo
   testImplementation project(':test-models')
   testImplementation project(path: ':test-models', configuration: 'testDataTemplate')
-  testImplementation externalDependency.testngJava8
+  testImplementation externalDependency.testng
 }

 idea {

@@ -38,4 +30,4 @@ idea {
 }

 // Need to compile backing java parameterDefinitions with the data template.
-sourceSets.mainGeneratedDataTemplate.java.srcDirs('src/main/javaPegasus/')
\ No newline at end of file
+sourceSets.mainGeneratedDataTemplate.java.srcDirs('src/main/javaPegasus/')
@@ -15,13 +15,12 @@ test {
 }

 jar {
-  archiveName = "$project.name-lib.jar"
+  archiveClassifier = "lib"
 }

 shadowJar {
   zip64 true
-  classifier = null
-  archiveName = "$project.name-${version}.jar"
+  archiveClassifier = ""
   exclude "META-INF/*.RSA", "META-INF/*.SF","META-INF/*.DSA"
 }

@@ -39,12 +38,12 @@ dependencies() {
 }

 task sourcesJar(type: Jar) {
-  classifier 'sources'
+  archiveClassifier = 'sources'
   from sourceSets.main.allJava
 }

 task javadocJar(type: Jar, dependsOn: javadoc) {
-  classifier 'javadoc'
+  archiveClassifier = 'javadoc'
   from javadoc.destinationDir
 }
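Several hunks here and below migrate off archive properties that Gradle 8 removed: `archiveName`, `baseName`, and `classifier` were replaced by the lazy `archiveFileName`, `archiveBaseName`, and `archiveClassifier` properties. A before/after sketch (the task and values are illustrative):

```groovy
// Gradle-7-deprecated, Gradle-8-removed spelling:
//   jar { archiveName = "$project.name-lib.jar"; classifier = 'lib' }

// Gradle 8 replacement: lazy, Provider-backed archive properties.
tasks.named('jar', Jar) {
  archiveBaseName = project.name
  archiveClassifier = 'lib'
  // archiveFileName defaults to baseName-version-classifier.extension,
  // but can be set directly if a fixed name must be preserved:
  // archiveFileName = "${project.name}-lib.jar"
}
```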
@@ -1,8 +1,11 @@
-apply plugin: 'java-library'
+plugins {
+  id 'java-library'
+}

 dependencies {
   api project(':metadata-events:mxe-avro')
   api project(':metadata-models')
+  api project(path: ':metadata-models', configuration: "dataTemplate")
   api spec.product.pegasus.dataAvro

   testImplementation externalDependency.testng
@@ -14,19 +14,9 @@ import org.apache.tools.ant.filters.ReplaceTokens

 jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation

-tasks.withType(JavaCompile).configureEach {
-  javaCompiler = javaToolchains.compilerFor {
-    languageVersion = JavaLanguageVersion.of(8)
-  }
-}
-tasks.withType(Test).configureEach {
-  javaLauncher = javaToolchains.launcherFor {
-    languageVersion = JavaLanguageVersion.of(8)
-  }
-}
-
 dependencies {
   implementation project(':metadata-models')
+  implementation project(path: ':metadata-models', configuration: "dataTemplate")
   implementation(externalDependency.kafkaAvroSerializer) {
     exclude group: "org.apache.avro"
   }

@@ -49,7 +39,7 @@ dependencies {
   annotationProcessor externalDependency.lombok
   // VisibleForTesting
   compileOnly externalDependency.guava
-  testImplementation externalDependency.testngJava8
+  testImplementation externalDependency.testng
   testImplementation externalDependency.mockito
   testImplementation externalDependency.mockServer
   testImplementation externalDependency.mockServerClient

@@ -241,4 +231,4 @@ sourceSets.main.resources.srcDir "${generateOpenApiPojos.outputDir}/src/main/resources"

 clean {
   project.delete("$projectDir/generated")
-}
\ No newline at end of file
+}
@@ -64,10 +64,6 @@ protobuf {
 task publishSchema(dependsOn: build) {
   description "Publishes protobuf schema in the `main` sourceSet to DataHub"

-  def javaLauncher = javaToolchains.launcherFor {
-    languageVersion = JavaLanguageVersion.of(11)
-  }
-
   fileTree("schema").matching {
     exclude "protobuf/meta/**"
   }.each {f ->
@@ -12,12 +12,6 @@ apply from: '../versioning.gradle'

 jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation

-afterEvaluate {
-  if (project.plugins.hasPlugin('java')) {
-    sourceCompatibility = 11
-    targetCompatibility = 11
-  }
-}
 ext {
   javaMainClass = "datahub.protobuf.Proto2DataHub"
 }

@@ -211,4 +205,4 @@ nexusStaging {
   password = System.getenv("NEXUS_PASSWORD")
 }

-
+startScripts.dependsOn shadowJar
@@ -1,16 +1,6 @@
-apply plugin: 'java'
-apply plugin: 'jacoco'
-
-
-tasks.withType(JavaCompile).configureEach {
-  javaCompiler = javaToolchains.compilerFor {
-    languageVersion = JavaLanguageVersion.of(8)
-  }
-}
-tasks.withType(Test).configureEach {
-  javaLauncher = javaToolchains.launcherFor {
-    languageVersion = JavaLanguageVersion.of(8)
-  }
+plugins {
+  id 'java'
+  id 'jacoco'
 }

 dependencies {
@@ -11,17 +11,6 @@ apply from: '../versioning.gradle'

 jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation

-tasks.withType(JavaCompile).configureEach {
-  javaCompiler = javaToolchains.compilerFor {
-    languageVersion = JavaLanguageVersion.of(8)
-  }
-}
-tasks.withType(Test).configureEach {
-  javaLauncher = javaToolchains.launcherFor {
-    languageVersion = JavaLanguageVersion.of(8)
-  }
-}
-
 //to rename artifacts for publish
 project.archivesBaseName = 'datahub-'+project.name

@@ -34,18 +23,19 @@ configurations {

 dependencies {

-  //Needed for tie breaking of guava version need for spark and wiremock
-  provided(externalDependency.hadoopMapreduceClient) {
-    force = true
+  constraints {
+    provided(externalDependency.hadoopMapreduceClient) {
+      because 'Needed for tie breaking of guava version need for spark and wiremock'
+    }
+    provided(externalDependency.hadoopCommon) {
+      because 'required for org.apache.hadoop.util.StopWatch'
+    }
+    provided(externalDependency.commonsIo) {
+      because 'required for org.apache.commons.io.Charsets that is used internally'
+    }
   }

-  provided(externalDependency.hadoopCommon) {
-    force = true
-  } // required for org.apache.hadoop.util.StopWatch
-
-  provided(externalDependency.commonsIo) {
-    force = true
-  } // required for org.apache.commons.io.Charsets that is used internally
   provided 'org.scala-lang:scala-library:2.12.18'

   implementation externalDependency.slf4jApi
   compileOnly externalDependency.lombok

@@ -86,7 +76,7 @@ task checkShadowJar(type: Exec) {

 shadowJar {
   zip64=true
-  classifier=''
+  archiveClassifier = ''
   mergeServiceFiles()

   def exclude_modules = project

@@ -107,7 +97,7 @@ shadowJar {

   // preventing java multi-release JAR leakage
   // https://github.com/johnrengelman/shadow/issues/729
-  exclude('module-info.class', 'META-INF/versions/**')
+  exclude('module-info.class', 'META-INF/versions/**', 'LICENSE', 'NOTICE')

   // prevent jni conflict with spark
   exclude '**/libzstd-jni.*'

@@ -138,6 +128,25 @@ jacocoTestReport {
 test {
   forkEvery = 1
   useJUnit()
+
+  def sparkJava17CompatibleJvmArgs = [
+    "--add-opens=java.base/java.lang=ALL-UNNAMED",
+    //"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED",
+    //"--add-opens=java.base/java.lang.reflect=ALL-UNNAMED",
+    //"--add-opens=java.base/java.io=ALL-UNNAMED",
+    "--add-opens=java.base/java.net=ALL-UNNAMED",
+    "--add-opens=java.base/java.nio=ALL-UNNAMED",
+    //"--add-opens=java.base/java.util=ALL-UNNAMED",
+    //"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED",
+    //"--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED",
+    "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED",
+    //"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED",
+    //"--add-opens=java.base/sun.security.action=ALL-UNNAMED",
+    //"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED",
+    //"--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED",
+  ]
+  jvmArgs = sparkJava17CompatibleJvmArgs
+
   finalizedBy jacocoTestReport
 }

@@ -151,12 +160,12 @@ task integrationTest(type: Exec, dependsOn: [shadowJar, ':docker:quickstartSlim']) {
 }

 task sourcesJar(type: Jar) {
-  classifier 'sources'
+  archiveClassifier = 'sources'
   from sourceSets.main.allJava
 }

 task javadocJar(type: Jar, dependsOn: javadoc) {
-  classifier 'javadoc'
+  archiveClassifier = 'javadoc'
   from javadoc.destinationDir
 }

@@ -224,3 +233,12 @@ nexusStaging {
   username = System.getenv("NEXUS_USERNAME")
   password = System.getenv("NEXUS_PASSWORD")
 }
+
+task cleanExtraDirs {
+  delete "$projectDir/derby.log"
+  delete "$projectDir/src/test/resources/data/hive"
+  delete "$projectDir/src/test/resources/data/out.csv"
+  delete "$projectDir/src/test/resources/data/out_persist.csv"
+  delete "$projectDir/spark-smoke-test/venv"
+}
+clean.finalizedBy(cleanExtraDirs)
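The spark-lineage hunk above also swaps `force = true` for dependency constraints, the Gradle-recommended way to pin a version while recording the reason. A generic sketch of the pattern (the coordinates are illustrative, not from this commit):

```groovy
dependencies {
  implementation 'com.example:some-client'   // version resolved via the constraint

  constraints {
    implementation('com.example:some-client:2.4.8') {
      because 'tie-breaks the version pulled in transitively by two other dependencies'
    }
  }
}
```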
@@ -34,7 +34,9 @@ jar -tvf $jarFile |\
   grep -v "linux/" |\
   grep -v "darwin" |\
   grep -v "MetadataChangeProposal.avsc" |\
-  grep -v "aix"
+  grep -v "aix" |\
+  grep -v "library.properties" |\
+  grep -v "rootdoc.txt"

 if [ $? -ne 0 ]; then
   echo "✅ No unexpected class paths found in ${jarFile}"
@@ -17,7 +17,7 @@ RUN apt-get update -y && \
     apt-get install /tmp/zulu-repo_1.0.0-3_all.deb && \
     apt-get update && \
     # apt-cache search zulu && \
-    apt-get install -y --no-install-recommends zulu11-jre && \
+    apt-get install -y --no-install-recommends zulu17-jre && \
     apt-get clean && \
     curl -sS https://archive.apache.org/dist/spark/spark-${spark_version}/spark-${spark_version}-bin-hadoop${hadoop_version}.tgz -o spark.tgz && \
     tar -xf spark.tgz && \
@@ -7,25 +7,24 @@

 saluation () {
   echo "--------------------------------------------------------"
-  echo "Starting execution $1"
+  echo "Starting execution $1 (properties: $2)"
   echo "--------------------------------------------------------"

 }

-saluation "HdfsIn2HdfsOut1.py"
-
+saluation "HdfsIn2HdfsOut1.py" $2
 spark-submit --properties-file $2 HdfsIn2HdfsOut1.py

-saluation "HdfsIn2HdfsOut2.py"
+saluation "HdfsIn2HdfsOut2.py" $2
 spark-submit --properties-file $2 HdfsIn2HdfsOut2.py

-saluation "HdfsIn2HiveCreateTable.py"
+saluation "HdfsIn2HiveCreateTable.py" $2
 spark-submit --properties-file $2 HdfsIn2HiveCreateTable.py

-saluation "HdfsIn2HiveCreateInsertTable.py"
+saluation "HdfsIn2HiveCreateInsertTable.py" $2
 spark-submit --properties-file $2 HdfsIn2HiveCreateInsertTable.py

-saluation "HiveInHiveOut.py"
+saluation "HiveInHiveOut.py" $2
 spark-submit --properties-file $2 HiveInHiveOut.py
@@ -4,3 +4,7 @@ spark.jars file:///opt/workspace/datahub-spark-lineage*.jar
 spark.extraListeners datahub.spark.DatahubSparkListener

 spark.datahub.rest.server http://datahub-gms:8080
+
+spark.driver.extraJavaOptions --add-opens java.base/java.lang=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED
+spark.executor.extraJavaOptions --add-opens java.base/java.lang=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED
@@ -17,17 +17,6 @@ repositories {
   jcenter()
 }

-tasks.withType(JavaCompile).configureEach {
-  javaCompiler = javaToolchains.compilerFor {
-    languageVersion = JavaLanguageVersion.of(8)
-  }
-}
-tasks.withType(Test).configureEach {
-  javaLauncher = javaToolchains.launcherFor {
-    languageVersion = JavaLanguageVersion.of(8)
-  }
-}
-
 dependencies {
   implementation 'org.apache.spark:spark-sql_2.11:2.4.8'
 }
@@ -37,7 +37,7 @@ public class TestCoalesceJobLineage {

   private static final String APP_NAME = "sparkCoalesceTestApp";

-  private static final String TEST_RELATIVE_PATH = "../";
+  private static final String TEST_RELATIVE_PATH = "";
   private static final String RESOURCE_DIR = "src/test/resources";
   private static final String DATA_DIR = TEST_RELATIVE_PATH + RESOURCE_DIR + "/data";
   private static final String WAREHOUSE_LOC = DATA_DIR + "/hive/warehouse/coalesce";

@@ -142,6 +142,9 @@ public class TestCoalesceJobLineage {
             "spark.datahub.parent.datajob_urn",
             "urn:li:dataJob:(urn:li:dataFlow:(airflow,datahub_analytics_refresh,prod),load_dashboard_info_to_snowflake)")
         .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath())
+        .config(
+            "javax.jdo.option.ConnectionURL",
+            "jdbc:derby:;databaseName=build/tmp/metastore_db_coalesce;create=true")
         .enableHiveSupport()
         .getOrCreate();
@ -191,6 +191,9 @@ public class TestSparkJobsLineage {
|
||||
.config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE)
|
||||
.config("spark.datahub.metadata.dataset.env", DATASET_ENV.name())
|
||||
.config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath())
|
||||
.config(
|
||||
"javax.jdo.option.ConnectionURL",
|
||||
"jdbc:derby:;databaseName=build/tmp/metastore_db_spark;create=true")
|
||||
.enableHiveSupport()
|
||||
.getOrCreate();
|
||||
|
||||
|
@@ -62,7 +62,10 @@ dependencies {
   testImplementation externalDependency.h2
   testImplementation externalDependency.mysqlConnector
   testImplementation externalDependency.neo4jHarness
-  testImplementation (externalDependency.neo4jApoc) {
+  testImplementation (externalDependency.neo4jApocCore) {
     exclude group: 'org.yaml', module: 'snakeyaml'
   }
+  testImplementation (externalDependency.neo4jApocCommon) {
+    exclude group: 'org.yaml', module: 'snakeyaml'
+  }
   testImplementation externalDependency.mockito
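Neo4j 5 splits APOC into separately published pieces, so the single neo4jApoc alias becomes neo4jApocCore plus neo4jApocCommon, each still excluding snakeyaml. A sketch of how the two aliases might be declared in the repo's shared dependency map; the coordinates and version below are assumptions for illustration, not read from this diff:

    // Hypothetical entries in the shared externalDependency map.
    ext.externalDependency = [
        'neo4jApocCore'  : 'org.neo4j.procedure:apoc-core:5.x.y',
        'neo4jApocCommon': 'org.neo4j.procedure:apoc-common:5.x.y',
    ]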
@@ -432,8 +432,8 @@ public class Neo4jGraphService implements GraphService {
             + "(b)) "
             + "WHERE a <> b "
             + "  AND ALL(rt IN relationships(path) WHERE "
-            + "    (EXISTS(rt.source) AND rt.source = 'UI') OR "
-            + "    (NOT EXISTS(rt.createdOn) AND NOT EXISTS(rt.updatedOn)) OR "
+            + "    (rt.source IS NOT NULL AND rt.source = 'UI') OR "
+            + "    (rt.createdOn IS NULL AND rt.updatedOn IS NULL) OR "
             + "    ($startTimeMillis <= rt.createdOn <= $endTimeMillis OR "
             + "    $startTimeMillis <= rt.updatedOn <= $endTimeMillis) "
             + "  ) "
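This query rewrite tracks a Cypher language change: Neo4j 5 removed the EXISTS(node.property) predicate in favor of IS NOT NULL / IS NULL. A before/after sketch of just the predicate, shown as Groovy strings for illustration:

    def neo4j4 = "WHERE EXISTS(rt.source) AND rt.source = 'UI'"      // rejected by Neo4j 5
    def neo4j5 = "WHERE rt.source IS NOT NULL AND rt.source = 'UI'"  // works on Neo4j 5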
@@ -60,6 +60,7 @@ task avroSchemaSources(type: Copy) {
 }

 compileJava.dependsOn avroSchemaSources
+processResources.dependsOn avroSchemaSources

 clean {
     project.delete("src/main/resources/avro")
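processResources packages src/main/resources, which now contains files produced by avroSchemaSources; Gradle 8 fails builds that consume a task's outputs without declaring a dependency, hence the added dependsOn here and in the sibling hunks below. A condensed sketch of the producer/consumer wiring (paths are illustrative):

    // Groovy DSL sketch: every consumer of the copied files names its producer.
    task avroSchemaSources(type: Copy) {
        from 'build/generated/avsc'          // illustrative source location
        into 'src/main/resources/avro'
    }
    compileJava.dependsOn avroSchemaSources
    processResources.dependsOn avroSchemaSources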
@@ -1,8 +1,8 @@
 plugins {
     id 'java'
+    id 'pegasus'
 }

-apply plugin: 'pegasus'

 configurations {
     avro

@@ -49,6 +49,7 @@ task avroSchemaSources(type: Copy) {
 }

 compileJava.dependsOn avroSchemaSources
+processResources.dependsOn avroSchemaSources

 clean {
     project.delete("src/main/resources/avro")

@@ -1,7 +1,7 @@
 plugins {
     id 'java'
+    id 'pegasus'
 }

-apply plugin: 'pegasus'

 configurations {
     avro

@@ -37,6 +37,7 @@ task avroSchemaSources(type: Copy) {
 }

 compileJava.dependsOn avroSchemaSources
+processResources.dependsOn avroSchemaSources

 clean {
     project.delete("src/main/resources/avro")

@@ -16,8 +16,8 @@ buildscript {
 plugins {
     id 'base'
     id 'maven-publish'
+    id 'pegasus'
 }

-apply plugin: 'pegasus'

 if (project.hasProperty('projVersion')) {
     project.version = project.projVersion
@@ -1,4 +1,6 @@
-apply plugin: 'java'
+plugins {
+    id 'java'
+}

 dependencies {
     implementation project(":entity-registry")
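This is the first of many identical conversions in this commit: the script-style apply plugin: still works in Gradle 8, but the declarative plugins block is resolved earlier and validated, so the migration normalizes on it. A side-by-side sketch:

    // Legacy script style (being removed):
    //     apply plugin: 'java'
    // Declarative style (what the diff converges on):
    plugins {
        id 'java'
    }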
@@ -1,19 +1,11 @@
 import io.datahubproject.GenerateJsonSchemaTask

-apply plugin: 'java-library'
-apply plugin: 'pegasus'
-apply plugin: 'org.hidetake.swagger.generator'
+plugins {
+    id 'pegasus'
+    id 'java-library'
+    id 'org.hidetake.swagger.generator'
+}

-tasks.withType(JavaCompile).configureEach {
-    javaCompiler = javaToolchains.compilerFor {
-        languageVersion = JavaLanguageVersion.of(8)
-    }
-}
-tasks.withType(Test).configureEach {
-    javaLauncher = javaToolchains.launcherFor {
-        languageVersion = JavaLanguageVersion.of(8)
-    }
-}

 dependencies {
     api spec.product.pegasus.data

@@ -35,7 +27,7 @@ dependencies {

     swaggerCodegen externalDependency.swaggerCli
     testImplementation externalDependency.guava
-    testImplementation externalDependency.testngJava8
+    testImplementation externalDependency.testng
 }

 sourceSets {

@@ -1,4 +1,6 @@
-apply plugin: 'java'
+plugins {
+    id 'java'
+}

 dependencies {
     implementation project(path: ':metadata-models')

@@ -1,4 +1,6 @@
-apply plugin: 'java'
+plugins {
+    id 'java'
+}

 dependencies {
     implementation project(':metadata-auth:auth-api')

@@ -1,4 +1,6 @@
-apply plugin: 'java'
+plugins {
+    id 'java'
+}

 compileJava {

@@ -8,7 +8,7 @@ import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.verifyZeroInteractions;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
 import static org.mockito.Mockito.when;
 import static org.testng.Assert.assertEquals;
 import static org.testng.Assert.assertTrue;

@@ -68,7 +68,7 @@ public class DataPlatformInstanceFieldResolverProviderTest {

   assertEquals(
       Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues());
-  verifyZeroInteractions(entityClientMock);
+  verifyNoMoreInteractions(entityClientMock);
 }

 @Test
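verifyZeroInteractions was deprecated and then removed in newer Mockito lines, which the upgraded toolchain pulls in; verifyNoMoreInteractions is the drop-in replacement used here. A tiny Groovy sketch, assuming Mockito on the classpath; the mocked type is a stand-in:

    import static org.mockito.Mockito.mock
    import static org.mockito.Mockito.verifyNoMoreInteractions

    def collaborator = mock(Runnable)          // stand-in type for illustration
    verifyNoMoreInteractions(collaborator)     // passes: nothing was invoked on the mock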
@@ -1,4 +1,6 @@
-apply plugin: 'java'
+plugins {
+    id 'java'
+}

 dependencies {
     implementation project(':metadata-auth:auth-api')

@@ -1,4 +1,6 @@
-apply plugin: 'java-library'
+plugins {
+    id 'java-library'
+}

 dependencies {
     api project(':metadata-io')

@@ -1,4 +1,6 @@
-apply plugin: 'java'
+plugins {
+    id 'java'
+}

 dependencies {
     implementation project(':datahub-graphql-core')

@@ -1,4 +1,6 @@
-apply plugin: 'java'
+plugins {
+    id 'java'
+}

 dependencies {

@@ -1,4 +1,6 @@
-apply plugin: 'java'
+plugins {
+    id 'java'
+}

 dependencies {

@@ -30,4 +32,4 @@ test {

   clean {
     dependsOn ':metadata-service:plugin:src:test:sample-test-plugins:clean'
   }
 }

@@ -1,4 +1,6 @@
-apply plugin: 'java'
+plugins {
+    id 'java'
+}

 jar {
     archiveFileName = "sample-plugins.jar"

@@ -1,4 +1,6 @@
-apply plugin: 'pegasus'
+plugins {
+    id 'pegasus'
+}

 dependencies {
     dataModel project(':metadata-models')

@@ -17,4 +19,4 @@ dependencies {
         because("CVE-2023-1428, CVE-2023-32731")
       }
     }
   }
 }

@@ -1,5 +1,7 @@
-apply plugin: 'pegasus'
-apply plugin: 'java-library'
+plugins {
+    id 'pegasus'
+    id 'java-library'
+}

 dependencies {
     api project(':metadata-service:restli-api')

@@ -1,5 +1,7 @@
-apply plugin: 'java'
-apply plugin: 'pegasus'
+plugins {
+    id 'java'
+    id 'pegasus'
+}

 sourceSets {
     integTest {

@@ -1,5 +1,8 @@
-apply plugin: 'java'
-apply plugin: 'org.hidetake.swagger.generator'
+plugins {
+    id 'org.hidetake.swagger.generator'
+    id 'java'
+}

 dependencies {
     // Dependencies for open api

@@ -1,4 +1,6 @@
-apply plugin: 'java'
+plugins {
+    id 'java'
+}

 dependencies {
     implementation project(':metadata-service:factories')

@@ -1,5 +1,7 @@
-apply plugin: 'java'
-apply plugin: 'org.hidetake.swagger.generator'
+plugins {
+    id 'org.hidetake.swagger.generator'
+    id 'java'
+}

 configurations {
     enhance

@@ -1,4 +1,6 @@
-apply plugin: 'java'
+plugins {
+    id 'java'
+}

 dependencies {
     implementation project(':metadata-io')

@@ -1,4 +1,6 @@
-apply plugin: 'java-library'
+plugins {
+    id 'java-library'
+}

 dependencies {
     api externalDependency.avro

@@ -1,4 +1,6 @@
-apply plugin: 'java'
+plugins {
+    id 'java'
+}

 dependencies {
     implementation project(':entity-registry')
@@ -11,10 +11,10 @@ node {
   }

   // Version of node to use.
-  version = '16.8.0'
+  version = '21.2.0'

   // Version of Yarn to use.
-  yarnVersion = '1.22.0'
+  yarnVersion = '1.22.1'

   // Base URL for fetching node distributions (set nodeDistBaseUrl if you have a mirror).
   if (project.hasProperty('nodeDistBaseUrl')) {

@@ -30,11 +30,12 @@ node {
   yarnWorkDir = file("${project.projectDir}/.gradle/yarn")

   // Set the work directory where node_modules should be located
-  nodeModulesDir = file("${project.projectDir}")
+  nodeProjectDir = file("${project.projectDir}")
 }

 task yarnInstall(type: YarnTask) {
   println "Root directory: ${project.rootDir}";
+  environment = ['NODE_OPTIONS': '--openssl-legacy-provider']
   args = ['install', '--cwd', "${project.rootDir}/smoke-test/tests/cypress"]
 }
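Two independent fixes in this hunk: newer gradle-node-plugin releases renamed nodeModulesDir to nodeProjectDir, and Node 17+ links against OpenSSL 3, which breaks webpack's legacy MD4 hashing unless NODE_OPTIONS=--openssl-legacy-provider is set. A sketch applying the flag to every yarn task instead of just one; the task class path assumes the plugin's 3.x package layout:

    import com.github.gradle.node.yarn.task.YarnTask

    tasks.withType(YarnTask).configureEach {
        environment = ['NODE_OPTIONS': '--openssl-legacy-provider']
    }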
@@ -1,16 +1,8 @@
-apply plugin: 'pegasus'
-apply plugin: 'java-library'
+plugins {
+    id 'pegasus'
+    id 'java-library'
+}

-tasks.withType(JavaCompile).configureEach {
-    javaCompiler = javaToolchains.compilerFor {
-        languageVersion = JavaLanguageVersion.of(8)
-    }
-}
-tasks.withType(Test).configureEach {
-    javaLauncher = javaToolchains.launcherFor {
-        languageVersion = JavaLanguageVersion.of(8)
-    }
-}

 dependencies {
     implementation spec.product.pegasus.data
@@ -1,5 +1,5 @@
 {
-  "buildCommand": "./gradlew :docs-website:build",
+  "buildCommand": "./gradlew -PuseSystemNode=true :docs-website:build",
   "github": {
     "silent": true,
     "autoJobCancelation": true
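-PuseSystemNode=true tells the docs build to reuse the Node binary already present on the Vercel image rather than downloading the pinned one. A sketch of how a build script might honor such a flag; the property name comes from the diff, while the wiring below is an assumption:

    // Groovy DSL sketch, hypothetical wiring.
    node {
        if (project.hasProperty('useSystemNode')) {
            download = !project.property('useSystemNode').toString().toBoolean()
        }
    }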