Mirror of https://github.com/datahub-project/datahub.git (synced 2025-06-27 05:03:31 +00:00)
build: optimizations for incremental builds and faster CI (#13033)
Co-authored-by: Andrew Sikowitz <andrew.sikowitz@acryl.io>
This commit is contained in:
parent 87af4b9d53
commit 40106be208
.github/workflows/docker-unified.yml (vendored, 2 changes)
@@ -447,7 +447,7 @@ jobs:
- name: Pre-build artifacts for docker image
run: |
./gradlew :datahub-frontend:dist -x test -x yarnTest -x yarnLint --parallel
mv ./datahub-frontend/build/distributions/datahub-frontend-*.zip datahub-frontend.zip
mv ./datahub-frontend/build/stage/main .
- name: Build and push
uses: ./.github/actions/docker-custom-build-and-push
with:

@@ -91,7 +91,7 @@ plugins {
// TODO id "org.gradlex.java-ecosystem-capabilities" version "1.0"
}

apply from: "gradle/docker/docker.gradle"
apply from: "gradle/docker/docker-utils.gradle"

project.ext.spec = [
'product' : [
@@ -112,12 +112,12 @@ project.ext.spec = [

project.ext.externalDependency = [
'akkaHttp': "com.typesafe.akka:akka-http-core_$playScalaVersion:10.2.10", // max version due to licensing
'akkaParsing': "com.typesafe.akka:akka-parsing_$playScalaVersion:10.2.10", // akka-parsing is part of akka-http, so use akka http version
'akkaActor': "com.typesafe.akka:akka-actor_$playScalaVersion:$akkaVersion",
'akkaStream': "com.typesafe.akka:akka-stream_$playScalaVersion:$akkaVersion",
'akkaActorTyped': "com.typesafe.akka:akka-actor-typed_$playScalaVersion:$akkaVersion",
'akkaSlf4j': "com.typesafe.akka:akka-slf4j_$playScalaVersion:$akkaVersion",
'akkaJackson': "com.typesafe.akka:akka-serialization-jackson_$playScalaVersion:$akkaVersion",
'akkaParsing': "com.typesafe.akka:akka-parsing_$playScalaVersion:$akkaVersion",
'akkaProtobuf': "com.typesafe.akka:akka-protobuf-v3_$playScalaVersion:$akkaVersion",
'antlr4Runtime': 'org.antlr:antlr4-runtime:4.9.3',
'antlr4': 'org.antlr:antlr4:4.9.3',

@@ -1,12 +1,12 @@
plugins {
id 'scala'
id 'com.palantir.docker'
id 'org.gradle.playframework'
}

apply from: '../gradle/versioning/versioning.gradle'
apply from: './play.gradle'
apply from: '../gradle/coverage/java-coverage.gradle'
apply from: '../gradle/docker/docker.gradle'

ext {
docker_repo = 'datahub-frontend-react'
@@ -34,7 +34,7 @@ model {
}

task myTar(type: Tar) {
compression = Compression.GZIP
compression = Compression.NONE

from("${buildDir}/stage")

@@ -66,10 +66,19 @@ distributions {
}
}

task unversionZip(type: Copy, dependsOn: [':datahub-web-react:distZip', distZip]) {

from ("${buildDir}/distributions")
include "datahub-frontend-${version}.zip"
into "${buildDir}/distributions"
rename "datahub-frontend-${version}.zip", "datahub-frontend.zip"
}

docker {
dependsOn(stageMainDist)
name "${docker_registry}/${docker_repo}:v${version}"
version "v${version}"
dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
files "${buildDir}/stage"
files fileTree(rootProject.projectDir) {
include '.dockerignore'
include 'docker/monitoring/*'
@@ -77,12 +86,7 @@ docker {
}.exclude {
i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden())
}
tag("Debug", "${docker_registry}/${docker_repo}:debug")

// platform('linux/arm64', 'linux/amd64')
buildx(true)
load(true)
push(false)
additionalTag("Debug", "${docker_registry}/${docker_repo}:debug")

// Add build args if they are defined (needed for some CI or enterprise environments)
def dockerBuildArgs = [:]
@@ -101,20 +105,6 @@ docker {
}
}

task unversionZip(type: Copy, dependsOn: [':datahub-web-react:distZip', dist]) {
from ("${buildDir}/distributions")
include "datahub-frontend-${version}.zip"
into "${buildDir}/docker/"
rename "datahub-frontend-${version}.zip", "datahub-frontend.zip"
}
tasks.getByPath(":datahub-frontend:docker").dependsOn(unversionZip)

task cleanLocalDockerImages {
doLast {
rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}")
}
}
dockerClean.finalizedBy(cleanLocalDockerImages)

// gradle 8 fixes
tasks.getByName('createDatahub-frontendTarDist').dependsOn 'stageMainDist'

@@ -1,6 +1,6 @@
plugins {
id 'java'
id "io.github.kobylynskyi.graphql.codegen" version "4.1.1"
id "io.github.kobylynskyi.graphql.codegen" version "5.10.0"
}

apply from: '../gradle/coverage/java-coverage.gradle'
@@ -41,6 +41,7 @@ graphqlCodegen {
generateApis = true
generateParameterizedFieldsResolvers = false
modelValidationAnnotation = "@javax.annotation.Nonnull"
addGeneratedAnnotation = false // Skips timestamps in generated files which forces re-compile
customTypesMapping = [
Long: "Long",
Float: "Float"

@@ -1,11 +1,11 @@
plugins {
id 'org.springframework.boot'
id 'java'
id 'com.palantir.docker'
}

apply from: "../gradle/versioning/versioning.gradle"
apply from: "../gradle/coverage/java-coverage.gradle"
apply from: "../gradle/docker/docker.gradle"

ext {
docker_registry = rootProject.ext.docker_registry == 'linkedin' ? 'acryldata' : docker_registry
@@ -169,8 +169,8 @@ task runNoCode(type: Exec) {
}

docker {
dependsOn(bootJar)
name "${docker_registry}/${docker_repo}:v${version}"
version "v${version}"
dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile")
files bootJar.outputs.files
files fileTree(rootProject.projectDir) {
@@ -181,12 +181,7 @@ docker {
}.exclude {
i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden())
}
tag("Debug", "${docker_registry}/${docker_repo}:debug")

// platform('linux/arm64', 'linux/amd64')
buildx(true)
load(true)
push(false)
additionalTag("Debug", "${docker_registry}/${docker_repo}:debug")

// Add build args if they are defined (needed for some CI or enterprise environments)
def dockerBuildArgs = [:]
@@ -204,12 +199,4 @@ docker {
buildArgs(dockerBuildArgs)
}
}
tasks.getByPath(":datahub-upgrade:docker").dependsOn([bootJar])

task cleanLocalDockerImages {
doLast {
rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}")
}
}
dockerClean.finalizedBy(cleanLocalDockerImages)

datahub-web-react/.gitignore (vendored, 3 changes)
@@ -28,3 +28,6 @@ yarn-error.log*
# gql codegen
*.generated.ts
/.vscode

.yarn-test-sentinel
.yarn-lint-sentinel
@@ -80,6 +80,19 @@ task yarnServe(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) {
task yarnTest(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) {
// Explicitly runs in non-watch mode.
args = ['run', project.hasProperty('withCoverage') ? 'test-coverage' : 'test', 'run']
def test_sentinel = "${buildDir}/.yarn-test-sentinel"
outputs.file(test_sentinel)
inputs.files(project.fileTree(dir: 'src', include: ['**/*.ts', '**/*.tsx']))
doLast {
// touch the test sentinel file in the build directory
def file = file(test_sentinel)
if (!file.exists()) {
file.createNewFile()
} else {
file.setLastModified(System.currentTimeMillis())
}
}
outputs.cacheIf { true }
}

task yarnLint(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) {
@@ -90,6 +103,19 @@ test.dependsOn([yarnInstall, yarnTest, yarnLint])

task yarnLintFix(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) {
args = ['run', 'lint-fix']
def lint_sentinel = "${buildDir}/.yarn-lint-sentinel"
outputs.file(lint_sentinel)
inputs.files(project.fileTree(dir: 'src', include: ['**/*.ts', '**/*.tsx']))
doLast {
// touch the lint sentinel file in the build directory
def file = file(lint_sentinel)
if (!file.exists()) {
file.createNewFile()
} else {
file.setLastModified(System.currentTimeMillis())
}
}
outputs.cacheIf { true }
}

task yarnBuild(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) {
@@ -130,9 +156,9 @@ distZip {
}

jar {
dependsOn distZip
dependsOn yarnBuild
into('public') {
from zipTree(distZip.outputs.files.first())
from 'dist'
}
archiveClassifier = 'assets'
}

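The yarnTest and yarnLintFix tasks above declare a sentinel file as their only output so that Gradle's incremental-build machinery can skip re-running yarn when no TypeScript sources changed. A minimal sketch of the same pattern, with a hypothetical task name and sentinel path (not from this commit):

task lintDocs(type: Exec) {
    commandLine 'yarn', 'run', 'lint'
    // declare the sources the external tool reads ...
    inputs.files(project.fileTree(dir: 'src', include: ['**/*.ts', '**/*.tsx']))
    // ... and a sentinel file as the only output Gradle tracks
    def sentinel = file("${buildDir}/.lint-docs-sentinel")
    outputs.file(sentinel)
    outputs.cacheIf { true }
    doLast {
        sentinel.parentFile.mkdirs()
        sentinel.text = "ok"   // touching the sentinel records a successful run
    }
}

On a second run with unchanged sources, Gradle compares the declared inputs against the sentinel output and reports the task as UP-TO-DATE instead of invoking yarn again.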
@@ -80,11 +80,25 @@ ext {
preserveVolumes: true
]
]

// only for debug variants of quickstart to enable <variant>Reload tasks.
// The actual service name needs the profile to be appended, <container-name>-<profile>
moduleToContainer = [
':metadata-service:war': 'datahub-gms',
':datahub-frontend': 'frontend',
':datahub-upgrade': 'system-update',
':metadata-jobs:mce-consumer-job': 'datahub-mce-consumer',
':metadata-jobs:mae-consumer-job': 'datahub-mae-consumer',

]
}

// Register all quickstart tasks
quickstart_configs.each { taskName, config ->
tasks.register(taskName)
tasks.register(taskName) {
group = 'quickstart'
}

}

// Dynamically create all quickstart tasks and configurations
@@ -150,12 +164,57 @@ quickstart_configs.each { taskName, config ->
}
}

// Register all quickstart tasks
quickstart_configs.each { taskName, config ->
tasks.register("prepareAll${taskName}"){
group = 'quickstart-ci'
}
}

quickstart_configs.each { taskName, config ->
if (config.modules) {
tasks.getByName("prepareAll${taskName}").dependsOn(
config.modules.collect { it + ':dockerPrepare' }
)
}
}

quickstart_configs.each { taskName, config ->
tasks.register("buildImagesFromCache${taskName}") {
group = 'quickstart-ci'
}
}

quickstart_configs.each { taskName, config ->
if (config.modules) {
tasks.getByName("buildImagesFromCache${taskName}").dependsOn(
config.modules.collect { it + ':dockerFromCache' }
)
}
}


quickstart_configs.each { taskName, config ->
tasks.register("buildImages${taskName}") {
group = 'quickstart-ci'
}
}

quickstart_configs.each { taskName, config ->
if (config.modules) {
tasks.getByName("buildImages${taskName}").dependsOn(
config.modules.collect { it + ':dockerTag' }
)
}
}

tasks.register('minDockerCompose2.20', Exec) {
executable 'bash'
args '-c', 'echo -e "$(docker compose version --short)\n2.20"|sort --version-sort --check=quiet --reverse'
}

tasks.register('quickstartNuke') {
group = 'quickstart'
doFirst {
quickstart_configs.each { taskName, config ->
dockerCompose."${taskName}".removeVolumes = !config.preserveVolumes
@@ -165,6 +224,7 @@ tasks.register('quickstartNuke') {
}

tasks.register('quickstartDown') {
group = 'quickstart'
finalizedBy(tasks.withType(ComposeDownForced))
}

@@ -173,12 +233,36 @@ tasks.withType(ComposeUp).configureEach {
dependsOn tasks.named("minDockerCompose2.20")
}

task debugReload(type: Exec) {
def cmd = ['docker compose -p datahub --profile debug'] + ['-f', compose_base] + [
'restart',
'datahub-gms-debug',
'system-update-debug',
'frontend-debug'
]
// Register all quickstart Reload tasks. For quickstartDebug, the reload task is DebugReload. (Taskname without quickstart prefix)
quickstart_configs.each { taskName, config ->
if (config.isDebug) {
def reloadTaskName = taskName.replaceFirst(/^quickstart/, "")
tasks.register("${reloadTaskName}Reload", Exec) {
dependsOn tasks.named("prepareAll${taskName}")
group = 'quickstart'
description = "Build and reload only changed containers for the ${taskName} task"
doFirst {
def executedTasks = project.gradle.taskGraph.allTasks.findAll { it.state.executed }
def containersToRestart = []

moduleToContainer.each { modulePath, containerName ->
def moduleProject = project.project(modulePath)
def dockerPrepareTask = moduleProject.tasks.findByName('dockerPrepare')

if (dockerPrepareTask && executedTasks.contains(dockerPrepareTask) && !dockerPrepareTask.state.upToDate) {
containersToRestart << "${containerName}-${config.profile}"
}
}

// Only restart containers that had their modules rebuilt
if (containersToRestart) {
def cmd = ["docker compose -p datahub --profile ${config.profile}"] + ['-f', compose_base] + ['restart'] + containersToRestart
commandLine 'bash', '-c', cmd.join(" ")
} else {
// If no containers need restart, make this a no-op
commandLine 'bash', '-c', 'echo "No containers need restarting - all modules are up to date"'
}
}
}
}
}

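The loops above fan out one set of CI helper tasks per entry in quickstart_configs. As a sketch only (the config keys appear in this commit, but the entry name and module list here are illustrative), an entry of the shape those loops expect and the tasks it would produce:

def quickstart_configs = [
    'quickstartDebug': [
        profile        : 'debug',
        isDebug        : true,
        preserveVolumes: false,
        modules        : [':datahub-frontend', ':datahub-upgrade']
    ]
]
// For this entry the loops register:
//   prepareAllquickstartDebug           -> each module's :dockerPrepare
//   buildImagesFromCachequickstartDebug -> each module's :dockerFromCache
//   buildImagesquickstartDebug          -> each module's :dockerTag
//   DebugReload                         -> restarts only the containers whose dockerPrepare was not up to date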
@@ -25,17 +25,11 @@ RUN apk --no-cache --update-cache --available upgrade \

ENV LD_LIBRARY_PATH="/lib:/lib64"

FROM base AS unpack

COPY ./datahub-frontend.zip /
RUN unzip datahub-frontend.zip -d /tmp/out \
&& mv /tmp/out/main /datahub-frontend
COPY ./docker/monitoring/client-prometheus-config.yaml /datahub-frontend/
RUN chown -R datahub:datahub /datahub-frontend && chmod 755 /datahub-frontend

FROM base AS prod-install

COPY --from=unpack /datahub-frontend/ /datahub-frontend/
COPY --chown=datahub:datahub --chmod=755 ./docker/monitoring/client-prometheus-config.yaml /datahub-frontend/
COPY --chown=datahub:datahub --chmod=755 ./docker/datahub-frontend/start.sh /
COPY --chown=datahub:datahub --chmod=755 ./main /datahub-frontend/

FROM base AS dev-install
# Dummy stage for development. Assumes code is built on your machine and mounted to this image.
@@ -43,7 +37,6 @@ FROM base AS dev-install
VOLUME [ "/datahub-frontend" ]

FROM ${APP_ENV}-install AS final
COPY --chown=datahub:datahub --chmod=755 ./docker/datahub-frontend/start.sh /
USER datahub

ENV OTEL_EXPORTER_OTLP_MAX_PAYLOAD_SIZE=4194304 \

@@ -1,9 +1,9 @@
plugins {
id 'com.palantir.docker'
id 'java' // required for versioning
}

apply from: "../../gradle/versioning/versioning.gradle"
apply from: "../../gradle/docker/docker.gradle"

ext {
docker_registry = rootProject.ext.docker_registry == 'linkedin' ? 'acryldata' : docker_registry
@@ -16,8 +16,9 @@ ext {
}

docker {
dependsOn build
name "${docker_registry}/${docker_repo}:v${docker_version}"
version "v${docker_version}"
//version "v${docker_version}"
dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
files fileTree(rootProject.projectDir) {
include '.dockerignore'
@@ -44,19 +45,3 @@ docker {

buildArgs(dockerBuildArgs)
}
tasks.getByName('docker').dependsOn('build')

task mkdirBuildDocker {
doFirst {
mkdir "${project.buildDir}/docker"
}
}
dockerClean.finalizedBy(mkdirBuildDocker)
dockerClean.dependsOn([':docker:datahub-ingestion:dockerClean'])

task cleanLocalDockerImages {
doLast {
rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}")
}
}
dockerClean.finalizedBy(cleanLocalDockerImages)

@@ -1,9 +1,9 @@
plugins {
id 'com.palantir.docker'
id 'java' // required for versioning
}

apply from: "../../gradle/versioning/versioning.gradle"
apply from: "../../gradle/docker/docker.gradle"

ext {
docker_registry = rootProject.ext.docker_registry == 'linkedin' ? 'acryldata' : docker_registry
@@ -21,8 +21,8 @@ dependencies {
}

docker {
dependsOn 'build', ':docker:datahub-ingestion-base:docker', ':metadata-ingestion:codegen'
name "${docker_registry}/${docker_repo}:v${docker_version}"
version "v${docker_version}"
dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile${docker_target == "slim" ? "-slim-only" : ""}")
files fileTree(rootProject.projectDir) {
include '.dockerignore'
@@ -33,6 +33,7 @@ docker {
i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden())
}

version "v${docker_version}"
def dockerBuildArgs = [DOCKER_VERSION: version, RELEASE_VERSION: version.replace('-SNAPSHOT', '').replace('v', '').replace("-slim", ''), BASE_IMAGE: "${docker_registry}/datahub-ingestion-base"]

// Add build args if they are defined (needed for some CI or enterprise environments)
@@ -45,20 +46,3 @@ docker {

buildArgs(dockerBuildArgs)
}
tasks.getByName('dockerPrepare').dependsOn(['build',
':docker:datahub-ingestion-base:docker',
':metadata-ingestion:codegen'])

task mkdirBuildDocker {
doFirst {
mkdir "${project.buildDir}/docker"
}
}
dockerClean.finalizedBy(mkdirBuildDocker)

task cleanLocalDockerImages {
doLast {
rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}")
}
}
dockerClean.finalizedBy(cleanLocalDockerImages)

@@ -1,9 +1,9 @@
plugins {
id 'com.palantir.docker'
id 'java' // required for versioning
}

apply from: "../../gradle/versioning/versioning.gradle"
apply from: "../../gradle/docker/docker.gradle"

ext {
docker_repo = 'datahub-elasticsearch-setup'
@@ -11,8 +11,8 @@ ext {
}

docker {
dependsOn(build)
name "${docker_registry}/${docker_repo}:v${version}"
version "v${version}"
dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
files fileTree(rootProject.projectDir) {
include '.dockerignore'
@@ -21,12 +21,7 @@ docker {
}.exclude {
i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden())
}
tag("Debug", "${docker_registry}/${docker_repo}:debug")

// platform('linux/arm64', 'linux/amd64')
buildx(true)
load(true)
push(false)
additionalTag("Debug", "${docker_registry}/${docker_repo}:debug")

// Add build args if they are defined (needed for some CI or enterprise environments)
def dockerBuildArgs = [:]
@@ -38,18 +33,3 @@ docker {
buildArgs(dockerBuildArgs)
}
}
tasks.getByName('docker').dependsOn('build')

task mkdirBuildDocker {
doFirst {
mkdir "${project.buildDir}/docker"
}
}
dockerClean.finalizedBy(mkdirBuildDocker)

task cleanLocalDockerImages {
doLast {
rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}")
}
}
dockerClean.finalizedBy(cleanLocalDockerImages)

@@ -1,9 +1,9 @@
plugins {
id 'com.palantir.docker'
id 'java' // required for versioning
}

apply from: "../../gradle/versioning/versioning.gradle"
apply from: "../../gradle/docker/docker.gradle"

ext {
docker_repo = 'datahub-kafka-setup'
@@ -11,8 +11,8 @@ ext {
}

docker {
dependsOn(build)
name "${docker_registry}/${docker_repo}:v${version}"
version "v${version}"
dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
files fileTree(rootProject.projectDir) {
include '.dockerignore'
@@ -20,12 +20,7 @@ docker {
}.exclude {
i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden())
}
tag("Debug", "${docker_registry}/${docker_repo}:debug")

// platform('linux/arm64', 'linux/amd64')
buildx(true)
load(true)
push(false)
additionalTag("Debug", "${docker_registry}/${docker_repo}:debug")

// Add build args if they are defined (needed for some CI or enterprise environments)
def dockerBuildArgs = [:]
@@ -46,18 +41,3 @@ docker {
buildArgs(dockerBuildArgs)
}
}
tasks.getByName('docker').dependsOn('build')

task mkdirBuildDocker {
doFirst {
mkdir "${project.buildDir}/docker"
}
}
dockerClean.finalizedBy(mkdirBuildDocker)

task cleanLocalDockerImages {
doLast {
rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}")
}
}
dockerClean.finalizedBy(cleanLocalDockerImages)

@@ -1,9 +1,9 @@
plugins {
id 'com.palantir.docker'
id 'java' // required for versioning
}

apply from: "../../gradle/versioning/versioning.gradle"
apply from: "../../gradle/docker/docker.gradle"

ext {
docker_registry = rootProject.ext.docker_registry == 'linkedin' ? 'acryldata' : docker_registry
@@ -12,8 +12,8 @@ ext {
}

docker {
dependsOn build
name "${docker_registry}/${docker_repo}:v${version}"
version "v${version}"
dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
files fileTree(rootProject.projectDir) {
include '.dockerignore'
@@ -21,12 +21,7 @@ docker {
}.exclude {
i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden())
}
tag("Debug", "${docker_registry}/${docker_repo}:debug")

// platform('linux/arm64', 'linux/amd64')
buildx(true)
load(true)
push(false)
additionalTag("Debug", "${docker_registry}/${docker_repo}:debug")

// Add build args if they are defined (needed for some CI or enterprise environments)
def dockerBuildArgs = [:]
@@ -38,18 +33,3 @@ docker {
buildArgs(dockerBuildArgs)
}
}
tasks.getByName('docker').dependsOn('build')

task mkdirBuildDocker {
doFirst {
mkdir "${project.buildDir}/docker"
}
}
dockerClean.finalizedBy(mkdirBuildDocker)

task cleanLocalDockerImages {
doLast {
rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}")
}
}
dockerClean.finalizedBy(cleanLocalDockerImages)

@@ -1,9 +1,9 @@
plugins {
id 'com.palantir.docker'
id 'java' // required for versioning
}

apply from: "../../gradle/versioning/versioning.gradle"
apply from: "../../gradle/docker/docker.gradle"

ext {
docker_registry = rootProject.ext.docker_registry == 'linkedin' ? 'acryldata' : docker_registry
@@ -13,7 +13,6 @@ ext {

docker {
name "${docker_registry}/${docker_repo}:v${version}"
version "v${version}"
dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
files fileTree(rootProject.projectDir) {
include '.dockerignore'
@@ -21,12 +20,7 @@ docker {
}.exclude {
i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden())
}
tag("Debug", "${docker_registry}/${docker_repo}:debug")

// platform('linux/arm64', 'linux/amd64')
buildx(true)
load(true)
push(false)
additionalTag("Debug", "${docker_registry}/${docker_repo}:debug")

// Add build args if they are defined (needed for some CI or enterprise environments)
def dockerBuildArgs = [:]
@@ -38,18 +32,3 @@ docker {
buildArgs(dockerBuildArgs)
}
}
tasks.getByName('docker').dependsOn('build')

task mkdirBuildDocker {
doFirst {
mkdir "${project.buildDir}/docker"
}
}
dockerClean.finalizedBy(mkdirBuildDocker)

task cleanLocalDockerImages {
doLast {
rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}")
}
}
dockerClean.finalizedBy(cleanLocalDockerImages)

@@ -96,7 +96,7 @@ services:
profiles:
- debug-consumers
depends_on:
system-update-debug:
system-update-debug-consumers:
condition: service_completed_successfully
frontend-debug-neo4j:
<<: *datahub-frontend-service-dev

@@ -248,12 +248,11 @@ services:
condition: service_completed_successfully
kafka-setup:
condition: service_completed_successfully
system-update-debug:
system-update-debug: &system-update-debug
<<: *datahub-system-update-service-dev
profiles:
- debug
- debug-backend
- debug-consumers
depends_on:
mysql-setup-dev:
condition: service_completed_successfully
@@ -261,6 +260,10 @@ services:
condition: service_completed_successfully
kafka-setup-dev:
condition: service_completed_successfully
system-update-debug-consumers: # for consistent naming based on profile
<<: *system-update-debug
profiles:
- debug-consumers
system-update-debug-elasticsearch:
<<: *datahub-system-update-service-dev
profiles:
@@ -386,7 +389,7 @@ services:
MAE_CONSUMER_ENABLED: false
MCE_CONSUMER_ENABLED: false
depends_on:
system-update-debug:
system-update-debug-consumers:
condition: service_completed_successfully
datahub-gms-debug-neo4j:
<<: *datahub-gms-service-dev
@@ -414,7 +417,7 @@ services:
depends_on:
datahub-gms-quickstart-consumers:
condition: service_healthy
datahub-mae-consumer-quickstart-consumers-dev:
datahub-mae-consumer-debug-consumers:
<<: *datahub-mae-consumer-service-dev
profiles:
- debug-consumers
@@ -431,7 +434,7 @@ services:
depends_on:
datahub-gms-quickstart-consumers:
condition: service_healthy
datahub-mce-consumer-quickstart-consumers-dev:
datahub-mce-consumer-debug-consumers:
<<: *datahub-mce-consumer-service-dev
profiles:
- debug-consumers

gradle/docker/docker-utils.gradle (new file, 42 lines)
@@ -0,0 +1,42 @@
ext.getDockerImages = {
docker_registry, docker_repo, docker_tag ->
def stdOut = new ByteArrayOutputStream()
exec {
commandLine "docker", "images", "-q", "${docker_registry}/${docker_repo}:${docker_tag}"
standardOutput = stdOut
}
return stdOut.toString().trim().split("\\R").findAll {!it.empty}.unique() as List
}

ext.getDockerContainers = {
docker_registry, docker_repo, docker_tag ->
def stdOut = new ByteArrayOutputStream()
exec {
commandLine "docker", "container", "ls", "-q", "--filter", "ancestor=${docker_registry}/${docker_repo}:${docker_tag}"
standardOutput = stdOut
}
return stdOut.toString().trim().split("\\R").findAll {!it.empty}.unique() as List
}

ext.cleanLocalDockerImages = {
String docker_registry, String docker_repo, String docker_tag ->
println("Docker image string: ${docker_registry}/${docker_repo}:${docker_tag}")
def containers = getDockerContainers(docker_registry, docker_repo, docker_tag)
if(!containers.isEmpty()) {
println "Stopping containers: $containers"
exec {
commandLine = ["docker", "container", "stop"] + containers
}
exec {
commandLine = ["docker", "container", "rm"] + containers
}
}
def images = getDockerImages(docker_registry, docker_repo, docker_tag)
if(!images.isEmpty()) {
println "Removing images: $images"
exec {
ignoreExitValue true // may not work if used by downstream image
commandLine = ["docker", "rmi", "-f"] + images
}
}
}

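These helpers are applied from the root build (see the apply from: "gradle/docker/docker-utils.gradle" hunk above), so per-module scripts reach them through rootProject.ext, as the cleanLocalDockerImages tasks throughout this commit do. A minimal sketch of that call site:

task cleanLocalDockerImages {
    doLast {
        // docker_registry, docker_repo and version come from the module's ext block
        rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}")
    }
}
dockerClean.finalizedBy(cleanLocalDockerImages)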
@@ -1,29 +1,30 @@
import groovy.json.JsonSlurper
import org.apache.commons.io.output.TeeOutputStream

ext.getDockerImages = {
docker_registry, docker_repo, docker_tag ->

def _getDockerImages(String fullImageTag) {
def stdOut = new ByteArrayOutputStream()
exec {
commandLine "docker", "images", "-q", "${docker_registry}/${docker_repo}:${docker_tag}"
commandLine "docker", "images", "-q", "${fullImageTag}"
standardOutput = stdOut
}
return stdOut.toString().trim().split("\\R").findAll {!it.empty}.unique() as List
return stdOut.toString().trim().split("\\R").findAll { !it.empty }.unique() as List
}

ext.getDockerContainers = {
docker_registry, docker_repo, docker_tag ->
def _getDockerContainers(String fullImageTag) {
def stdOut = new ByteArrayOutputStream()
exec {
commandLine "docker", "container", "ls", "-q", "--filter", "ancestor=${docker_registry}/${docker_repo}:${docker_tag}"
commandLine "docker", "container", "ls", "-q", "--filter",
"ancestor=${fullImageTag}"
standardOutput = stdOut
}
return stdOut.toString().trim().split("\\R").findAll {!it.empty}.unique() as List
return stdOut.toString().trim().split("\\R").findAll { !it.empty }.unique() as List
}

ext.cleanLocalDockerImages = {
String docker_registry, String docker_repo, String docker_tag ->
println("Docker image string: ${docker_registry}/${docker_repo}:${docker_tag}")
def containers = getDockerContainers(docker_registry, docker_repo, docker_tag)
if(!containers.isEmpty()) {
def _cleanLocalDockerImages(String fullImageTag) {
println("Docker image string: ${fullImageTag}")
def containers = _getDockerContainers(fullImageTag)
if (!containers.isEmpty()) {
println "Stopping containers: $containers"
exec {
commandLine = ["docker", "container", "stop"] + containers
@@ -32,8 +33,8 @@ ext.cleanLocalDockerImages = {
commandLine = ["docker", "container", "rm"] + containers
}
}
def images = getDockerImages(docker_registry, docker_repo, docker_tag)
if(!images.isEmpty()) {
def images = _getDockerImages(fullImageTag)
if (!images.isEmpty()) {
println "Removing images: $images"
exec {
ignoreExitValue true // may not work if used by downstream image
@@ -41,3 +42,191 @@ ext.cleanLocalDockerImages = {
}
}
}

// Create extension object
class DockerPluginExtension {
Project project
Property<File> dockerfile
CopySpec copySpec
MapProperty<String, String> buildArgs
MapProperty<String, String> tags
ListProperty<String> platforms
ListProperty<Object> dependencies // May contain tasks or task names

DockerPluginExtension(Project project) {
this.project = project
dockerfile = project.objects.property(File)
buildArgs = project.objects.mapProperty(String, String)
copySpec = project.copySpec()
tags = project.objects.mapProperty(String, String)
platforms = project.objects.listProperty(String)
dependencies = project.objects.listProperty(Object)
}

def files(Object... files) {
copySpec.from(files)
}

def name(String value) {
additionalTag("", value)
}

def dockerfile(File value) {
dockerfile.set(value)
}

def buildArgs(Map<String, String> values) {
buildArgs.putAll(values)
}

def platform(String... platforms) {
this.platforms.addAll(platforms)
}

def additionalTag(String name, String tag) {
tags.put(name, tag)
}

def dependsOn(Object... tasks) {
dependencies.addAll(tasks)
}
}

def extension = project.extensions.create("docker", DockerPluginExtension)

project.afterEvaluate {
def buildContext = "${rootProject.buildDir}/dockerBuildContext/${rootProject.relativePath(project.projectDir)}/docker"
// ensure this directory exists
new File(buildContext).mkdirs()
println("buildContext: ${buildContext}")

tasks.register("dockerPrepare", Sync) {
group "docker"
with extension.copySpec
from extension.dockerfile
into buildContext
dependsOn extension.dependencies.get()
}

project.tasks.register("docker", Exec) {
group "docker"
description "Builds the docker image and applies all tags defined"
dependsOn dockerPrepare

def marker = "${buildContext}/../imageCreated-${name}.json"

inputs.file(extension.dockerfile)
inputs.dir(buildContext)
inputs.property("tags", extension.tags)
inputs.property("buildArgs", extension.buildArgs)
outputs.file(marker)

def dockerCmd = []

dockerCmd += ["docker", "buildx", "build", "--load"]
if (extension.platforms.get()) {
dockerCmd << "--platform=${extension.platforms.get().join(',')}"
}

// Add GitHub Actions specific arguments if running in GitHub workflow and using gha cache.
// Or set DOCKER_CACHE=DEPOT -- which transparently can cache without these args when running on depot runner.
if (System.getenv("DOCKER_CACHE") == "GITHUB") {
def githubToken = System.getenv("GITHUB_TOKEN")
if (githubToken) {
dockerCmd += ["--cache-from", "type=gha,token=${githubToken}"]
dockerCmd += ["--cache-to", "type=gha,mode=max,token=${githubToken}"]
} else {
dockerCmd += ["--cache-from", "type=gha"]
dockerCmd += ["--cache-to", "type=gha,mode=max"]
}
}

// Generate image metadata (we really just want the sha256 hash of the image)
dockerCmd += ["--metadata-file", marker]

extension.buildArgs.get().each { k, v -> dockerCmd += ["--build-arg", "${k}=${v}"]
}

extension.tags.get().each { taskName, tag ->
dockerCmd += ["-t", tag]
}

dockerCmd << buildContext

// Some projects use a Dockerfile with the non-default name.
dockerCmd += ["--file", extension.dockerfile.get().toPath()]

// Mask GitHub token in the output for security
def maskedCmd = dockerCmd.collect { arg ->
if (arg.startsWith("type=gha,token=")) {
"type=gha,token=****"
} else {
arg
}
}
println(maskedCmd.join(" "))
commandLine dockerCmd

outputs.upToDateWhen {
try {
/* The docker task is up-to-date if
* 1. the last build generated a marker file
* 2. An image with the same tag exists in local docker images
* 3. that existing image sha256 matches what is written in the generated marker file
*/
def jsonContent = new File(marker).text
def jsonData = new JsonSlurper().parseText(jsonContent)
def imageIdFromMarker = jsonData['containerimage.digest']
if (imageIdFromMarker != null && imageIdFromMarker.startsWith("sha256:")) {
imageIdFromMarker = imageIdFromMarker.substring(7); // "sha256:".length() == 7
}
for(String tag : extension.tags.get().values()) {
def actualImage = _getDockerImages(tag)
if (actualImage == null || actualImage.size() == 0 && imageIdFromMarker == null ||
!imageIdFromMarker.startsWith(actualImage.get(0))) {
logger.debug("UP-TO-DATE CHECK for ${name}: did not find image ${imageIdFromMarker}")
return false
}
}
logger.debug("UP-TO-DATE CHECK for ${name}: Is up-to-date, skipping")
return true
} catch (Exception e) {
// any exceptions also implicitly mean not-up-to-date
return false
}
}
}

project.tasks.register("dockerFromCache") {
// This task is same as docker but without the dockerPrepare dependency. This is useful in CI where a
// github workflow can run multiple jobs in parallel which all share the docker build context root folder.
// This is faster than uploading and downloading all images together.
group "docker"
description "Builds the docker image from cache and applies all tags defined but without the dependencies."
// Reference the docker original task's configuration and run it but without its dependencies
doLast {
def originalTaskInstance = tasks.named('docker').get()
project.exec {
commandLine originalTaskInstance.commandLine
workingDir originalTaskInstance.workingDir
}
}
}

extension.tags.get().each { taskName, tag ->
// For backward compatibility, can be removed if we dont really have a need post migration
// TODO: Choice of task names is to retain current names so that downstream dependencies in quickstart still work
// without changes. Can be changed post full migration.
project.tasks.register("dockerTag${taskName}") {
dependsOn project.tasks.named("docker")
}
}

task dockerClean {
group "docker"
doLast {
extension.tags.get().each { _, tag -> _cleanLocalDockerImages(tag)
}
}
}
}

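This script provides a shared docker extension plus the dockerPrepare, docker, dockerTag*, dockerFromCache and dockerClean tasks; the module build files earlier in this commit (datahub-frontend, datahub-upgrade, the consumer jobs, the setup images) all configure it in the same shape. A condensed sketch of that usage, with placeholder values:

apply from: '../gradle/docker/docker.gradle'

docker {
    dependsOn(bootJar)                                    // whatever task produces the artifacts below
    name "${docker_registry}/${docker_repo}:v${version}"  // primary tag (internally additionalTag(""))
    dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile")
    files bootJar.outputs.files                           // copied into the shared build context by dockerPrepare
    additionalTag("Debug", "${docker_registry}/${docker_repo}:debug")
}
// Registered per module: dockerPrepare, docker, dockerTag<Name>, dockerFromCache, dockerClean.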
@@ -230,11 +230,11 @@ nexusStaging {
password = System.getenv("NEXUS_PASSWORD")
}

task cleanExtraDirs {
clean{
delete "$projectDir/derby.log"
delete "$projectDir/src/test/resources/data/hive"
delete "$projectDir/src/test/resources/data/out.csv"
delete "$projectDir/src/test/resources/data/out_persist.csv"
delete "$projectDir/spark-smoke-test/venv"
}
clean.finalizedBy(cleanExtraDirs)

@@ -1,11 +1,11 @@
plugins {
id 'org.springframework.boot'
id 'java'
id 'com.palantir.docker'
}

apply from: '../../gradle/coverage/java-coverage.gradle'
apply from: "../../gradle/versioning/versioning.gradle"
apply from: "../../gradle/docker/docker.gradle"

ext {
docker_repo = 'datahub-mae-consumer'
@@ -44,8 +44,9 @@ bootJar {
}

docker {
dependsOn(bootJar)
name "${docker_registry}/${docker_repo}:v${version}"
version "v${version}"
//version "v${version}"
dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile")
files bootJar.outputs.files
files fileTree(rootProject.projectDir) {
@@ -56,12 +57,9 @@ docker {
}.exclude {
i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden())
}
tag("Debug", "${docker_registry}/${docker_repo}:debug")
additionalTag("Debug", "${docker_registry}/${docker_repo}:debug")

// platform('linux/arm64', 'linux/amd64')
buildx(true)
load(true)
push(false)

// Add build args if they are defined (needed for some CI or enterprise environments)
def dockerBuildArgs = [:]
@@ -79,11 +77,3 @@ docker {
buildArgs(dockerBuildArgs)
}
}
tasks.getByPath(":metadata-jobs:mae-consumer-job:docker").dependsOn([bootJar])

task cleanLocalDockerImages {
doLast {
rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}")
}
}
dockerClean.finalizedBy(cleanLocalDockerImages)

@@ -1,11 +1,11 @@
plugins {
id 'org.springframework.boot'
id 'java'
id 'com.palantir.docker'
}

apply from: '../../gradle/coverage/java-coverage.gradle'
apply from: "../../gradle/versioning/versioning.gradle"
apply from: "../../gradle/docker/docker.gradle"

ext {
docker_repo = 'datahub-mce-consumer'
@@ -55,8 +55,9 @@ bootJar {
}

docker {
dependsOn(bootJar)
name "${docker_registry}/${docker_repo}:v${version}"
version "v${version}"
//version "v${version}"
dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile")
files bootJar.outputs.files
files fileTree(rootProject.projectDir) {
@@ -67,12 +68,9 @@ docker {
}.exclude {
i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden())
}
tag("Debug", "${docker_registry}/${docker_repo}:debug")
additionalTag("Debug", "${docker_registry}/${docker_repo}:debug")

// platform('linux/arm64', 'linux/amd64')
buildx(true)
load(true)
push(false)

// Add build args if they are defined (needed for some CI or enterprise environments)
def dockerBuildArgs = [:]
@@ -90,11 +88,3 @@ docker {
buildArgs(dockerBuildArgs)
}
}
tasks.getByPath(":metadata-jobs:mce-consumer-job:docker").dependsOn([bootJar])

task cleanLocalDockerImages {
doLast {
rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}")
}
}
dockerClean.finalizedBy(cleanLocalDockerImages)

@@ -1,11 +1,11 @@
plugins {
id 'org.springframework.boot'
id 'com.palantir.docker'
id 'java'
}

apply from: '../../gradle/coverage/java-coverage.gradle'
apply from: "../../gradle/versioning/versioning.gradle"
apply from: "../../gradle/docker/docker.gradle"

ext {
docker_repo = 'datahub-gms'
@@ -83,8 +83,8 @@ bootRun {
}

docker {
dependsOn bootJar
name "${docker_registry}/${docker_repo}:v${version}"
version "v${version}"
dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile")
files bootJar.outputs.files
files fileTree(rootProject.projectDir) {
@@ -95,12 +95,7 @@ docker {
}.exclude {
i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden())
}
tag("Debug", "${docker_registry}/${docker_repo}:debug")

// platform('linux/arm64', 'linux/amd64')
buildx(true)
load(true)
push(false)
additionalTag("Debug", "${docker_registry}/${docker_repo}:debug")

// Add build args if they are defined (needed for some CI or enterprise environments)
def dockerBuildArgs = [:]
@@ -118,14 +113,6 @@ docker {
buildArgs(dockerBuildArgs)
}
}
tasks.getByPath(":metadata-service:war:docker").dependsOn([bootJar])

task cleanLocalDockerImages {
doLast {
rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}")
}
}
dockerClean.finalizedBy(cleanLocalDockerImages)

test {
jacoco {