Mirror of https://github.com/datahub-project/datahub.git (synced 2025-11-05 13:20:33 +00:00)
refactor(java11) - convert most modules to java 11 (#5836)

Co-authored-by: Shirshanka Das <shirshanka@apache.org>

commit 203a6ff57b, parent 325b959ea6
.github/workflows/build-and-test.yml (4 changes)

@@ -25,10 +25,10 @@ jobs:
     timeout-minutes: 60
     steps:
       - uses: actions/checkout@v2
-      - name: Set up JDK 1.8
+      - name: Set up JDK 11
        uses: actions/setup-java@v1
        with:
-          java-version: 1.8
+          java-version: 11
      - uses: actions/setup-python@v2
        with:
          python-version: "3.7"
.github/workflows/check-datahub-jars.yml (4 changes)

@@ -30,10 +30,10 @@ jobs:
       - uses: actions/checkout@v2
        with:
          fetch-depth: 0
-      - name: Set up JDK 1.8
+      - name: Set up JDK 11
        uses: actions/setup-java@v1
        with:
-          java-version: 1.8
+          java-version: 11
      - uses: actions/setup-python@v2
        with:
          python-version: "3.7"
.github/workflows/docker-unified.yml (4 changes)

@@ -359,10 +359,10 @@ jobs:
     steps:
       - name: Check out the repo
        uses: actions/checkout@v2
-      - name: Set up JDK 1.8
+      - name: Set up JDK 11
        uses: actions/setup-java@v1
        with:
-          java-version: 1.8
+          java-version: 11
      - uses: actions/setup-python@v2
        with:
          python-version: "3.7"
.github/workflows/documentation.yml (4 changes)

@@ -19,10 +19,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - name: Set up JDK 1.8
+      - name: Set up JDK 11
        uses: actions/setup-java@v1
        with:
-          java-version: 1.8
+          java-version: 11
      - uses: actions/setup-python@v2
        with:
          python-version: "3.10"
.github/workflows/metadata-io.yml (4 changes)

@@ -29,10 +29,10 @@ jobs:
     timeout-minutes: 60
     steps:
       - uses: actions/checkout@v2
-      - name: Set up JDK 1.8
+      - name: Set up JDK 11
        uses: actions/setup-java@v1
        with:
-          java-version: 1.8
+          java-version: 11
      - uses: actions/setup-python@v2
        with:
          python-version: "3.7"
.github/workflows/publish-datahub-jars.yml (4 changes)

@@ -53,10 +53,10 @@ jobs:
       - uses: actions/checkout@v2
        with:
          fetch-depth: 0
-      - name: Set up JDK 1.8
+      - name: Set up JDK 11
        uses: actions/setup-java@v1
        with:
-          java-version: 1.8
+          java-version: 11
      - uses: actions/setup-python@v2
        with:
          python-version: "3.7"
.github/workflows/spark-smoke-test.yml (4 changes)

@@ -28,10 +28,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - name: Set up JDK 1.8
+      - name: Set up JDK 11
        uses: actions/setup-java@v1
        with:
-          java-version: 1.8
+          java-version: 11
      - uses: actions/setup-python@v2
        with:
          python-version: "3.7"
.gitignore (2 changes)

@@ -37,7 +37,7 @@ MANIFEST
 **/build
 /config
 */i18n
-/out
+out/
 
 # Mac OS
 **/.DS_Store
build.gradle (29 changes)

@@ -11,9 +11,6 @@ buildscript {
     classpath 'com.github.node-gradle:gradle-node-plugin:2.2.4'
     classpath 'com.commercehub.gradle.plugin:gradle-avro-plugin:0.8.1'
     classpath 'org.springframework.boot:spring-boot-gradle-plugin:' + springBootVersion
-    classpath('com.github.jengelman.gradle.plugins:shadow:5.2.0') {
-      exclude group: 'org.apache.logging.log4j', module: 'log4j-core'
-    }
     classpath "io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.30.0"
     classpath "com.palantir.gradle.gitversion:gradle-git-version:0.12.3"
     classpath "org.gradle.playframework:gradle-playframework:0.12"
@@ -23,6 +20,7 @@ buildscript {
 
 plugins {
   id 'com.gorylenko.gradle-git-properties' version '2.4.0-rc2'
+  id 'com.github.johnrengelman.shadow' version '6.1.0'
 }
 
 project.ext.spec = [
@@ -171,7 +169,7 @@ allprojects {
   apply plugin: 'checkstyle'
 }
 
-configure(subprojects.findAll {it.name != 'spark-lineage'}) {
+configure(subprojects.findAll {! it.name.startsWith('spark-lineage') }) {
 
   configurations.all {
     exclude group: "io.netty", module: "netty"
@@ -221,28 +219,6 @@ subprojects {
     }
   }
 
-  if (project.name != 'datahub-protobuf') {
-    tasks.withType(JavaCompile).configureEach {
-      javaCompiler = javaToolchains.compilerFor {
-        languageVersion = JavaLanguageVersion.of(8)
-      }
-    }
-    tasks.withType(Test).configureEach {
-      javaLauncher = javaToolchains.launcherFor {
-        languageVersion = JavaLanguageVersion.of(8)
-      }
-    }
-  } else {
-    tasks.withType(JavaExec).configureEach {
-      javaLauncher = javaToolchains.launcherFor {
-        languageVersion = JavaLanguageVersion.of(11)
-      }
-    }
-    tasks.withType(Javadoc).configureEach {
-      javadocTool = javaToolchains.javadocToolFor {
-        languageVersion = JavaLanguageVersion.of(11)
-      }
-    }
   tasks.withType(JavaCompile).configureEach {
     javaCompiler = javaToolchains.compilerFor {
       languageVersion = JavaLanguageVersion.of(11)
@@ -253,7 +229,6 @@ subprojects {
       languageVersion = JavaLanguageVersion.of(11)
     }
   }
-  }
 
  afterEvaluate {
    if (project.plugins.hasPlugin('pegasus')) {
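Note: taken together, these build.gradle hunks invert the toolchain default. Previously every subproject except datahub-protobuf compiled and tested on a Java 8 toolchain; now Java 11 is the default for all subprojects, and individual modules opt back down to 8 where needed (see the Pegasus and client modules below). A minimal sketch of the pattern, with a hypothetical module name:

// Root build.gradle: default all subprojects to a Java 11 toolchain.
subprojects {
  tasks.withType(JavaCompile).configureEach {
    javaCompiler = javaToolchains.compilerFor {
      languageVersion = JavaLanguageVersion.of(11)
    }
  }
  tasks.withType(Test).configureEach {
    javaLauncher = javaToolchains.launcherFor {
      languageVersion = JavaLanguageVersion.of(11)
    }
  }
}

// hypothetical-java8-module/build.gradle: pin one module back to Java 8 so
// its published artifact stays loadable on JDK 8 runtimes.
tasks.withType(JavaCompile).configureEach {
  javaCompiler = javaToolchains.compilerFor {
    languageVersion = JavaLanguageVersion.of(8)
  }
}

Gradle resolves each JavaLanguageVersion against the JDKs it detects (or downloads), so a single build invocation can compile different modules with different JDKs.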
docker/broker/env/docker.env (1 change)

@@ -5,3 +5,4 @@ KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9
 KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1
 KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0
 KAFKA_HEAP_OPTS=-Xms256m -Xmx256m
+KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE=false
@@ -14,7 +14,7 @@ FROM --platform=$BUILDPLATFORM node:16.13.0-alpine3.14 AS prod-build
 
 # Upgrade Alpine and base packages
 RUN apk --no-cache --update-cache --available upgrade \
-    && apk --no-cache add perl openjdk8
+    && apk --no-cache add perl openjdk8 openjdk11
 
 ARG USE_SYSTEM_NODE="true"
 ENV CI=true

@@ -28,7 +28,7 @@ FROM --platform=$BUILDPLATFORM alpine:3.14 AS prod-build
 
 # Upgrade Alpine and base packages
 RUN apk --no-cache --update-cache --available upgrade \
-    && apk --no-cache add openjdk8 perl
+    && apk --no-cache add openjdk8 openjdk11 perl
 
 COPY . /datahub-src
 RUN cd /datahub-src && ./gradlew :metadata-service:war:build -x test

@@ -3,7 +3,7 @@ ARG APP_ENV=prod
 
 FROM acryldata/datahub-ingestion-base as base
 
-FROM openjdk:8 as prod-build
+FROM openjdk:11 as prod-build
 COPY . /datahub-src
 RUN cd /datahub-src && ./gradlew :metadata-events:mxe-schemas:build
 

@@ -25,7 +25,7 @@ FROM --platform=$BUILDPLATFORM alpine:3.14.2 AS prod-build
 
 # Upgrade Alpine and base packages
 RUN apk --no-cache --update-cache --available upgrade \
-    && apk --no-cache add openjdk8 perl
+    && apk --no-cache add openjdk8 openjdk11 perl
 
 COPY . datahub-src
 RUN cd datahub-src && ./gradlew :metadata-jobs:mae-consumer-job:build -x test

@@ -25,7 +25,7 @@ FROM --platform=$BUILDPLATFORM alpine:3.14.2 AS prod-build
 
 # Upgrade Alpine and base packages
 RUN apk --no-cache --update-cache --available upgrade \
-    && apk --no-cache add openjdk8 perl
+    && apk --no-cache add openjdk8 openjdk11 perl
 
 COPY . datahub-src
 RUN cd datahub-src && ./gradlew :metadata-jobs:mce-consumer-job:build

@@ -26,7 +26,7 @@ FROM --platform=$BUILDPLATFORM alpine:3.14 AS prod-build
 
 # Upgrade Alpine and base packages
 RUN apk --no-cache --update-cache --available upgrade \
-    && apk --no-cache add openjdk8 perl
+    && apk --no-cache add openjdk8 openjdk11 perl
 
 COPY . datahub-src
 RUN cd datahub-src && ./gradlew :datahub-upgrade:build
@@ -14,7 +14,7 @@ services:
     ports:
       - "2181:2181"
     volumes:
-      - zkdata:/var/opt/zookeeper
+      - zkdata:/var/lib/zookeeper
 
   broker:
     image: confluentinc/cp-kafka:5.4.0

@@ -14,7 +14,7 @@ services:
     ports:
       - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181
     volumes:
-      - zkdata:/var/opt/zookeeper
+      - zkdata:/var/lib/zookeeper
 
   broker:
     image: confluentinc/cp-kafka:5.4.0

@@ -14,7 +14,7 @@ services:
     ports:
       - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181
     volumes:
-      - zkdata:/var/opt/zookeeper
+      - zkdata:/var/lib/zookeeper
 
   broker:
     image: confluentinc/cp-kafka:5.4.0

@@ -14,6 +14,7 @@ services:
       - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1
       - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0
       - KAFKA_HEAP_OPTS=-Xms256m -Xmx256m
+      - KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE=false
     hostname: broker
     image: confluentinc/cp-kafka:7.2.0
     ports:
@@ -176,7 +177,7 @@ services:
     ports:
       - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181
     volumes:
-      - zkdata:/var/opt/zookeeper
+      - zkdata:/var/lib/zookeeper
 version: '2.3'
 volumes:
   esdata: null

@@ -14,6 +14,7 @@ services:
       - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1
       - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0
       - KAFKA_HEAP_OPTS=-Xms256m -Xmx256m
+      - KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE=false
     hostname: broker
     image: confluentinc/cp-kafka:5.4.0
     ports:
@@ -178,7 +179,7 @@ services:
     ports:
       - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181
     volumes:
-      - zkdata:/var/opt/zookeeper
+      - zkdata:/var/lib/zookeeper
 version: '2.3'
 volumes:
   esdata: null

@@ -14,6 +14,7 @@ services:
       - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1
       - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0
       - KAFKA_HEAP_OPTS=-Xms256m -Xmx256m
+      - KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE=false
     hostname: broker
     image: confluentinc/cp-kafka:5.4.0
     ports:
@@ -198,7 +199,7 @@ services:
     ports:
       - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181
     volumes:
-      - zkdata:/var/opt/zookeeper
+      - zkdata:/var/lib/zookeeper
 version: '2.3'
 volumes:
   broker: null
@@ -5,14 +5,14 @@ title: "Local Development"
 # DataHub Developer's Guide
 
 ## Pre-requirements
-- [Java 1.8 SDK](https://adoptopenjdk.net/?variant=openjdk8&jvmVariant=hotspot)
+- [Java 11 SDK](https://openjdk.org/projects/jdk/11/)
 - [Docker](https://www.docker.com/)
 - [Docker Compose](https://docs.docker.com/compose/)
 - Docker engine with at least 8GB of memory to run tests.
 
 :::note
 
-Do not try to use a JDK newer than JDK 8. The build process does not work with newer JDKs currently.
+Do not try to use a JDK newer than JDK 11. The build process does not work with newer JDKs currently.
 
 :::
 
@@ -101,7 +101,7 @@ You're probably using a Java version that's too new for gradle. Run the followin
 ```
 java --version
 ```
-While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 1.8](https://www.oracle.com/java/technologies/javase/javase-jdk8-downloads.html) (aka Java 8). Plan for Java 11 migration is being discussed in [this issue](https://github.com/datahub-project/datahub/issues/1699).
+While it may be possible to build and run DataHub using newer versions of Java, we currently only support [Java 11](https://openjdk.org/projects/jdk/11/) (aka Java 11).
 
 ### Getting `cannot find symbol` error for `javax.annotation.Generated`
 
@@ -5,6 +5,7 @@ This file documents any backwards-incompatible changes in DataHub and assists pe
 ## Next
 
 ### Breaking Changes
+- Java version 11 or greater is required.
 
 ### Potential Downtime
 
@@ -1,6 +1,17 @@
 apply plugin: 'java'
 apply plugin: 'pegasus'
 
+tasks.withType(JavaCompile).configureEach {
+  javaCompiler = javaToolchains.compilerFor {
+    languageVersion = JavaLanguageVersion.of(8)
+  }
+}
+tasks.withType(Test).configureEach {
+  javaLauncher = javaToolchains.launcherFor {
+    languageVersion = JavaLanguageVersion.of(8)
+  }
+}
+
 dependencies {
   compile spec.product.pegasus.data
   compile externalDependency.commonsLang
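The same pin-to-Java-8 block recurs in several modules below (datahub-client, spark-lineage, the smoke-test project, and two more Pegasus modules). A more compact equivalent, shown here only as an assumption about what Gradle allows rather than what the commit does, would configure the module's java extension once instead of per task type:

// Sketch (assumption): a toolchain block on the java extension covers
// compilation, test execution, and javadoc in one declaration (Gradle 6.7+).
java {
  toolchain {
    languageVersion = JavaLanguageVersion.of(8)
  }
}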
@@ -13,6 +13,17 @@ import org.apache.tools.ant.filters.ReplaceTokens
 
 jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation
 
+tasks.withType(JavaCompile).configureEach {
+  javaCompiler = javaToolchains.compilerFor {
+    languageVersion = JavaLanguageVersion.of(8)
+  }
+}
+tasks.withType(Test).configureEach {
+  javaLauncher = javaToolchains.launcherFor {
+    languageVersion = JavaLanguageVersion.of(8)
+  }
+}
+
 dependencies {
 
   implementation project(':metadata-models')
@@ -22,7 +33,7 @@ dependencies {
     because 'Vulnerability Issue'
   }
 }
-  shadow externalDependency.httpAsyncClient // we want our clients to provide this
+  compileOnly externalDependency.httpAsyncClient
   implementation externalDependency.jacksonDataBind
   implementation externalDependency.javaxValidation
   implementation externalDependency.springContext
@@ -33,11 +44,11 @@ dependencies {
 
   compileOnly externalDependency.lombok
   annotationProcessor externalDependency.lombok
-  testCompile externalDependency.httpAsyncClient // needed as shadow excludes it
   testCompile externalDependency.mockito
   testCompile externalDependency.mockServer
   testCompile externalDependency.mockServerClient
   testCompile externalDependency.testContainers
+  testCompile externalDependency.httpAsyncClient
 
   swaggerCodegen 'io.swagger.codegen.v3:swagger-codegen-cli:3.0.33'
 }
@@ -72,12 +83,10 @@ task checkShadowJar(type: Exec) {
 shadowJar {
   zip64=true
   archiveClassifier = ''
-  dependencies {
-    exclude(dependency('org.apache.httpcomponents:httpasyncclient'))
-    exclude 'LICENSE'
-    exclude 'NOTICE'
-    exclude 'LICENSE.txt'
-  }
+  // preventing java multi-release JAR leakage
+  // https://github.com/johnrengelman/shadow/issues/729
+  exclude('module-info.class', 'META-INF/versions/**',
+    '**/LICENSE', '**/LICENSE.txt', '**/NOTICE', '**/NOTICE.txt')
   mergeServiceFiles()
   // we relocate namespaces manually, because we want to know exactly which libs we are exposing and why
   // we can move to automatic relocation using ConfigureShadowRelocation after we get to a good place on these first
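Note on the datahub-client changes above: httpAsyncClient moves from the shadow configuration to compileOnly, so it is no longer advertised to consumers of the shaded jar, and shadowJar now strips module-info.class and META-INF/versions/** because multi-release dependencies would otherwise leak unrelocated Java 9+ class files into the shaded artifact. Consumers must now supply the HTTP client themselves, as spark-lineage does in the next file. A hedged sketch of such a consumer (the version number is an assumption, not taken from the diff):

dependencies {
  // The shaded datahub-client no longer re-exports httpasyncclient...
  implementation project(path: ':metadata-integration:java:datahub-client', configuration: 'shadow')
  // ...so a consumer declares it explicitly. Version is assumed for illustration.
  implementation 'org.apache.httpcomponents:httpasyncclient:4.1.5'
}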
@@ -11,6 +11,17 @@ apply from: '../versioning.gradle'
 
 jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation
 
+tasks.withType(JavaCompile).configureEach {
+  javaCompiler = javaToolchains.compilerFor {
+    languageVersion = JavaLanguageVersion.of(8)
+  }
+}
+tasks.withType(Test).configureEach {
+  javaLauncher = javaToolchains.launcherFor {
+    languageVersion = JavaLanguageVersion.of(8)
+  }
+}
+
 //to rename artifacts for publish
 project.archivesBaseName = 'datahub-'+project.name
 
@@ -44,15 +55,14 @@ dependencies {
 
   implementation project(path: ':metadata-integration:java:datahub-client', configuration: 'shadow')
 
-
   provided(externalDependency.sparkSql)
   provided(externalDependency.sparkHive)
+  implementation externalDependency.httpAsyncClient
 
   // Tests need a concrete log4j available. Providing it here
   testImplementation 'org.apache.logging.log4j:log4j-api:2.17.1'
   testImplementation 'org.apache.logging.log4j:log4j-core:2.17.1'
 
-
   testImplementation(externalDependency.postgresql){
     exclude group: "com.fasterxml.jackson.core"
   }
@@ -92,8 +102,12 @@ shadowJar {
   exclude(dependency {
     exclude_modules.contains(it.name)
   })
 
 }
 
+  // preventing java multi-release JAR leakage
+  // https://github.com/johnrengelman/shadow/issues/729
+  exclude('module-info.class', 'META-INF/versions/**')
+
   relocate 'com.fasterxml.jackson', 'datahub.shaded.jackson'
   relocate 'org.apache.http','datahub.spark2.shaded.http'
   relocate 'org.apache.commons.codec', 'datahub.spark2.shaded.o.a.c.codec'
@@ -28,7 +28,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/spark_spark-master_7077/javahdfsin2hdfsout1"
+      "/spark/spark_spark-master_7077"
     ]
   }
 }
@@ -69,7 +69,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/javahdfsin2hdfsout1/queryexecid_4"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -19,7 +19,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/spark_spark-master_7077/javahdfsin2hdfsout2"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -55,7 +55,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/javahdfsin2hdfsout2/queryexecid_4"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -132,7 +132,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/javahdfsin2hdfsout2/queryexecid_5"
+      "/spark/spark_spark-master_7077"
     ]
   }
 }
@@ -28,7 +28,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/spark_spark-master_7077/javahdfsin2hivecreateinserttable"
+      "/spark/spark_spark-master_7077"
     ]
   }
 }
@@ -61,7 +61,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/javahdfsin2hivecreateinserttable/queryexecid_5"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -107,7 +107,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/javahdfsin2hivecreateinserttable/queryexecid_6"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -167,7 +167,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/javahdfsin2hivecreateinserttable/queryexecid_7"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -19,7 +19,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/spark_spark-master_7077/javahdfsin2hivecreatetable"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -64,7 +64,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/javahdfsin2hivecreatetable/queryexecid_5"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -14,7 +14,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/spark_spark-master_7077/javahiveinhiveout"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -55,7 +55,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/javahiveinhiveout/queryexecid_8"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -131,7 +131,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/javahiveinhiveout/queryexecid_9"
+      "/spark/spark_spark-master_7077"
     ]
   }
 }
@@ -163,7 +163,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/javahiveinhiveout/queryexecid_10"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -233,7 +233,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/javahiveinhiveout/queryexecid_11"
+      "/spark/spark_spark-master_7077"
     ]
   }
 }
@@ -28,7 +28,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/spark_spark-master_7077/pythonhdfsin2hdfsout1"
+      "/spark/spark_spark-master_7077"
     ]
   }
 }
@@ -61,7 +61,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/pythonhdfsin2hdfsout1/queryexecid_4"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -28,7 +28,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/spark_spark-master_7077/pythonhdfsin2hdfsout2"
+      "/spark/spark_spark-master_7077"
     ]
   }
 }
@@ -80,7 +80,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/pythonhdfsin2hdfsout2/queryexecid_4"
+      "/spark/spark_spark-master_7077"
     ]
   }
 }
@@ -118,7 +118,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/pythonhdfsin2hdfsout2/queryexecid_5"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -28,7 +28,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/spark_spark-master_7077/pythonhdfsin2hivecreateinserttable"
+      "/spark/spark_spark-master_7077"
     ]
   }
 }
@@ -66,7 +66,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/pythonhdfsin2hivecreateinserttable/queryexecid_5"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -102,7 +102,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/pythonhdfsin2hivecreateinserttable/queryexecid_6"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -165,7 +165,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/pythonhdfsin2hivecreateinserttable/queryexecid_7"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -23,7 +23,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/spark_spark-master_7077/pythonhdfsin2hivecreatetable"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -69,7 +69,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/pythonhdfsin2hivecreatetable/queryexecid_5"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -28,7 +28,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/spark_spark-master_7077/pythonhiveinhiveout"
+      "/spark/spark_spark-master_7077"
     ]
   }
 }
@@ -79,7 +79,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/pythonhiveinhiveout/queryexecid_10"
+      "/spark/spark_spark-master_7077"
     ]
   }
 }
@@ -130,7 +130,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/pythonhiveinhiveout/queryexecid_9"
+      "/spark/spark_spark-master_7077"
     ]
   }
 }
@@ -157,7 +157,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/pythonhiveinhiveout/queryexecid_8"
+      "/spark/spark_spark-master_7077"
     ]
   }
 },
@@ -219,7 +219,7 @@
 {
   "com.linkedin.common.BrowsePaths": {
     "paths": [
-      "/spark/pythonhiveinhiveout/queryexecid_11"
+      "/spark/spark_spark-master_7077"
    ]
   }
 },
@@ -1,5 +1,6 @@
 #!/bin/bash
 
+set -e
 pip install -r requirements.txt
 
 echo "--------------------------------------------------------------------"
@@ -1,6 +1,6 @@
 #!/bin/bash
 
+set -e
 # Script assumptions:
 # - The gradle build has already been run.
 # - Python 3.6+ is installed and in the PATH.
@@ -17,6 +17,17 @@ repositories {
   jcenter()
 }
 
+tasks.withType(JavaCompile).configureEach {
+  javaCompiler = javaToolchains.compilerFor {
+    languageVersion = JavaLanguageVersion.of(8)
+  }
+}
+tasks.withType(Test).configureEach {
+  javaLauncher = javaToolchains.launcherFor {
+    languageVersion = JavaLanguageVersion.of(8)
+  }
+}
+
 dependencies {
   implementation 'org.apache.spark:spark-sql_2.11:2.4.8'
 }
@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.9.2-bin.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
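The wrapper bump is likely load-bearing: the javaToolchains API used throughout the new build files was introduced in Gradle 6.7, so the old 5.6.4 wrapper could not evaluate those blocks. A wrapper is normally regenerated through the built-in wrapper task; a sketch:

// Sketch: configure the wrapper task, then run `./gradlew wrapper` once to
// regenerate gradle-wrapper.properties for 6.9.2.
wrapper {
  gradleVersion = '6.9.2'
  distributionType = Wrapper.DistributionType.BIN
}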
@@ -74,4 +74,6 @@ def test_ingestion_via_rest(json_file):
     data = response.json()
     diff = json_compare.check(value, data)
     print(urn)
+    if diff != NO_DIFF:
+        print("Expected: {} Actual: {}".format(value, data))
     assert diff == NO_DIFF
@@ -17,7 +17,6 @@ import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TestRule;
@@ -29,7 +28,6 @@ import org.mockserver.model.HttpResponse;
 import org.mockserver.model.JsonBody;
 import org.mockserver.socket.PortFactory;
 import org.mockserver.verify.VerificationTimes;
-import org.testcontainers.containers.PostgreSQLContainer;
 
 import com.linkedin.common.FabricType;
 
@@ -58,9 +56,6 @@ public class TestCoalesceJobLineage {
   private static final String PIPELINE_PLATFORM_INSTANCE = "test_machine";
   private static final String DATASET_PLATFORM_INSTANCE = "test_dev_dataset";
 
-  @ClassRule
-  public static PostgreSQLContainer<?> db = new PostgreSQLContainer<>("postgres:9.6.12")
-      .withDatabaseName("sparkcoalescetestdb");
   private static SparkSession spark;
   private static Properties jdbcConnnProperties;
   private static ClientAndServer mockServer;
@@ -10,6 +10,7 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.sql.Connection;
+import java.time.Duration;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -54,6 +55,7 @@ import datahub.spark.model.dataset.CatalogTableDataset;
 import datahub.spark.model.dataset.HdfsPathDataset;
 import datahub.spark.model.dataset.JdbcDataset;
 import datahub.spark.model.dataset.SparkDataset;
+import org.testcontainers.containers.wait.strategy.Wait;
 
 //!!!! IMP !!!!!!!!
 //Add the test number before naming the test. This will ensure that tests run in specified order.
@@ -86,8 +88,12 @@ public class TestSparkJobsLineage {
   private static final String DATASET_PLATFORM_INSTANCE = "test_dev_dataset";
 
   @ClassRule
-  public static PostgreSQLContainer<?> db = new PostgreSQLContainer<>("postgres:9.6.12")
+  public static PostgreSQLContainer<?> db;
+  static {
+    db = new PostgreSQLContainer<>("postgres:9.6.12")
       .withDatabaseName("sparktestdb");
+    db.waitingFor(Wait.forListeningPort()).withStartupTimeout(Duration.ofMinutes(15)).start();
+  }
   private static SparkSession spark;
   private static Properties jdbcConnnProperties;
   private static DatasetLineageAccumulator acc;
@@ -101,7 +101,7 @@ tasks.register('generateOpenApiPojos', GenerateSwaggerCode) {
   it.setAdditionalProperties([
     "group-id" : "io.datahubproject",
     "dateLibrary" : "java8",
-    "java8" : "true",
+    "java11" : "true",
     "modelPropertyNaming" : "original",
     "modelPackage" : "io.datahubproject.openapi.generated"] as Map<String, String>)
 
@@ -3,6 +3,16 @@ import io.datahubproject.GenerateJsonSchemaTask
 
 apply plugin: 'pegasus'
 
+tasks.withType(JavaCompile).configureEach {
+  javaCompiler = javaToolchains.compilerFor {
+    languageVersion = JavaLanguageVersion.of(8)
+  }
+}
+tasks.withType(Test).configureEach {
+  javaLauncher = javaToolchains.launcherFor {
+    languageVersion = JavaLanguageVersion.of(8)
+  }
+}
 
 dependencies {
   compile spec.product.pegasus.data
@@ -34,7 +34,6 @@ import io.datahubproject.openapi.generated.MetadataChangeProposal;
 import io.datahubproject.openapi.generated.OneOfEnvelopedAspectValue;
 import io.datahubproject.openapi.generated.OneOfGenericAspectValue;
 import io.datahubproject.openapi.generated.Status;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
@@ -46,11 +45,7 @@ import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
 import org.reflections.Reflections;
-import org.reflections.scanners.ResourcesScanner;
 import org.reflections.scanners.SubTypesScanner;
-import org.reflections.util.ClasspathHelper;
-import org.reflections.util.ConfigurationBuilder;
-import org.reflections.util.FilterBuilder;
 import org.springframework.beans.factory.config.BeanDefinition;
 import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
 import org.springframework.core.type.filter.AssignableTypeFilter;
@@ -98,17 +93,10 @@ public class MappingUtil {
     components = provider.findCandidateComponents("io/datahubproject/openapi/generated");
     components.forEach(MappingUtil::putGenericAspectEntry);
 
-    List<ClassLoader> classLoadersList = new ArrayList<>();
-    classLoadersList.add(ClasspathHelper.contextClassLoader());
-    classLoadersList.add(ClasspathHelper.staticClassLoader());
-
     // Build a map from fully qualified Pegasus generated class name to class
-    Reflections reflections = new Reflections(new ConfigurationBuilder()
-        .setScanners(new SubTypesScanner(false), new ResourcesScanner())
-        .setUrls(ClasspathHelper.forClassLoader(classLoadersList.toArray(new ClassLoader[0])))
-        .filterInputsBy(new FilterBuilder().include(FilterBuilder.prefix(PEGASUS_PACKAGE))));
-    Set<Class<? extends RecordTemplate>> pegasusComponents = reflections.getSubTypesOf(RecordTemplate.class);
-    pegasusComponents.forEach(aClass -> PEGASUS_TYPE_MAP.put(aClass.getSimpleName(), aClass));
+    new Reflections(PEGASUS_PACKAGE, new SubTypesScanner(false))
+        .getSubTypesOf(RecordTemplate.class)
+        .forEach(aClass -> PEGASUS_TYPE_MAP.put(aClass.getSimpleName(), aClass));
   }
 
   public static Map<String, EntityResponse> mapServiceResponse(Map<Urn, com.linkedin.entity.EntityResponse> serviceResponse,
|||||||
@ -1,6 +1,17 @@
|
|||||||
apply plugin: 'pegasus'
|
apply plugin: 'pegasus'
|
||||||
apply plugin: 'java'
|
apply plugin: 'java'
|
||||||
|
|
||||||
|
tasks.withType(JavaCompile).configureEach {
|
||||||
|
javaCompiler = javaToolchains.compilerFor {
|
||||||
|
languageVersion = JavaLanguageVersion.of(8)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tasks.withType(Test).configureEach {
|
||||||
|
javaLauncher = javaToolchains.launcherFor {
|
||||||
|
languageVersion = JavaLanguageVersion.of(8)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
compile spec.product.pegasus.data
|
compile spec.product.pegasus.data
|
||||||
compile externalDependency.commonsIo
|
compile externalDependency.commonsIo
|
||||||
|
|||||||