
plugins {
    id("com.palantir.git-version") apply false
}
apply plugin: 'java'
apply plugin: 'com.gradleup.shadow'
apply plugin: 'signing'
apply plugin: 'io.codearte.nexus-staging'
apply plugin: 'maven-publish'
apply from: '../versioning.gradle'
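// versioning.gradle is shared across modules; presumably it derives the
// project version from git via the com.palantir.git-version plugin declared above.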
jar.enabled = false // We only want to build shadow jars, so disable regular jar creation
// Rename artifacts for publishing
project.archivesBaseName = 'datahub-' + project.name
// Marks implementation dependencies that need to be excluded, along with their
// transitive dependencies, from the shadow jar. Otherwise the functionality is
// exactly the same as "implementation".
configurations {
    provided
    implementation.extendsFrom provided
}
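// Illustrative example: a dependency declared as
//   provided(externalDependency.sparkSql)
// compiles just like 'implementation', but the shadowJar block below resolves
// the 'provided' configuration and strips all of its modules (plus their
// transitives) out of the fat jar.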
dependencies {
    constraints {
        provided(externalDependency.hadoopMapreduceClient) {
            because 'Needed for tie-breaking the guava version needed by spark and wiremock'
        }
        provided(externalDependency.hadoopCommon) {
            because 'required for org.apache.hadoop.util.StopWatch'
        }
        provided(externalDependency.commonsIo) {
            because 'required for org.apache.commons.io.Charsets that is used internally'
        }
    }
    provided 'org.scala-lang:scala-library:2.12.18'
    implementation externalDependency.slf4jApi
    compileOnly externalDependency.lombok
    annotationProcessor externalDependency.lombok
    implementation externalDependency.typesafeConfig
    implementation externalDependency.opentracingJdbc
    implementation project(path: ':metadata-integration:java:datahub-client', configuration: 'shadow')
    provided(externalDependency.sparkSql)
    provided(externalDependency.sparkHive)
    implementation externalDependency.httpClient

    // Tests need a concrete log4j binding available; provide one here.
    testImplementation 'org.apache.logging.log4j:log4j-api:2.17.1'
    testImplementation 'org.apache.logging.log4j:log4j-core:2.17.1'
    testImplementation(externalDependency.postgresql) {
        exclude group: "com.fasterxml.jackson.core"
    }
    testImplementation externalDependency.mockito
    testImplementation(externalDependency.mockServer) {
        exclude group: "com.fasterxml.jackson.core"
    } // older version to allow older guava
    testImplementation(externalDependency.mockServerClient) {
        exclude group: "com.fasterxml.jackson.core"
    } // older version to allow older guava
    testImplementation(externalDependency.testContainersPostgresql)
}
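// Sanity check on the shadow jar contents; scripts/check_jar.sh is expected to
// fail the build if problematic classes leak into the jar (an assumption based
// on the finalizedBy wiring in shadowJar below, not on the script itself).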
task checkShadowJar(type: Exec) {
    commandLine 'sh', '-c', 'scripts/check_jar.sh'
}
shadowJar {
    zip64 = true
    archiveClassifier = ''
    mergeServiceFiles()
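    // Collect the names of every module resolved from the 'provided'
    // configuration (lenient resolution skips unresolvable artifacts), so that
    // the whole Spark/Hadoop surface can be excluded from the fat jar below.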
    def exclude_modules = project
        .configurations
        .provided
        .resolvedConfiguration
        .getLenientConfiguration()
        .getAllModuleDependencies()
        .collect {
            it.name
        }
    dependencies {
        exclude(dependency {
            exclude_modules.contains(it.name)
        })
    }
    // preventing java multi-release JAR leakage
    // https://github.com/johnrengelman/shadow/issues/729
    exclude('module-info.class', 'META-INF/versions/**', 'LICENSE', 'NOTICE')

    // prevent jni conflict with spark
    exclude '**/libzstd-jni.*'
    exclude '**/com_github_luben_zstd_*'
    exclude '**/log4j*.xml'
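    // Relocate bundled third-party packages into a datahub.* namespace so they
    // cannot clash with whatever versions Spark and the user's job already
    // have on the classpath.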
    relocate 'com.fasterxml.jackson', 'datahub.shaded.jackson'
    relocate 'org.slf4j', 'datahub.shaded.org.slf4j'
    relocate 'org.apache.hc', 'datahub.spark2.shaded.http'
    relocate 'org.apache.commons.codec', 'datahub.spark2.shaded.o.a.c.codec'
    relocate 'org.apache.commons.compress', 'datahub.spark2.shaded.o.a.c.compress'
    relocate 'org.apache.commons.io', 'datahub.spark2.shaded.o.a.c.io'
    relocate 'org.apache.commons.lang3', 'datahub.spark2.shaded.o.a.c.lang3'
    relocate 'mozilla', 'datahub.spark2.shaded.mozilla'
    relocate 'com.typesafe', 'datahub.spark2.shaded.typesafe'
    relocate 'io.opentracing', 'datahub.spark2.shaded.io.opentracing'
    relocate 'io.netty', 'datahub.spark2.shaded.io.netty'
    relocate 'ch.randelshofer', 'datahub.shaded.ch.randelshofer'
    relocate 'com.sun', 'datahub.shaded.com.sun'
    relocate 'avroutil1', 'datahub.shaded.avroutil1'
    relocate 'com.github', 'datahub.shaded.com.github'
    relocate 'org.apache.maven', 'datahub.shaded.org.apache.maven'

    finalizedBy checkShadowJar
}
test {
    forkEvery = 1
    useJUnit()
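    // JPMS --add-opens flags required for Spark's reflective access when the
    // tests run on Java 17; the commented-out entries appear to be kept for
    // reference in case further reflection failures surface.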
    def sparkJava17CompatibleJvmArgs = [
        "--add-opens=java.base/java.lang=ALL-UNNAMED",
        //"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED",
        //"--add-opens=java.base/java.lang.reflect=ALL-UNNAMED",
        //"--add-opens=java.base/java.io=ALL-UNNAMED",
        "--add-opens=java.base/java.net=ALL-UNNAMED",
        "--add-opens=java.base/java.nio=ALL-UNNAMED",
        //"--add-opens=java.base/java.util=ALL-UNNAMED",
        //"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED",
        //"--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED",
        "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED",
        //"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED",
        //"--add-opens=java.base/sun.security.action=ALL-UNNAMED",
        //"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED",
        //"--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED",
    ]
    jvmArgs = sparkJava17CompatibleJvmArgs
}
assemble {
    dependsOn shadowJar
}
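// Runs the Spark smoke tests against a dockerized DataHub quickstart (slim
// image). RUN_QUICKSTART=false presumably tells smoke.sh not to start another
// quickstart, since the ':docker:quickstartSlim' dependency already brings one up.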
task integrationTest(type: Exec, dependsOn: [shadowJar, ':docker:quickstartSlim']) {
    environment "RUN_QUICKSTART", "false"
    commandLine "spark-smoke-test/smoke.sh"
}
task sourcesJar(type: Jar) {
    archiveClassifier = 'sources'
    from sourceSets.main.allJava
}
task javadocJar(type: Jar, dependsOn: javadoc) {
    archiveClassifier = 'javadoc'
    from javadoc.destinationDir
}
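// Publish the shadow jar, together with the sources and javadoc jars built
// above, under the Maven coordinates io.acryl:datahub-spark-lineage.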
publishing {
    publications {
        shadow(MavenPublication) { publication ->
            project.shadow.component(publication)
            pom {
                name = 'Datahub Spark Lineage'
                group = 'io.acryl'
                artifactId = 'datahub-spark-lineage'
                description = 'Library to push data lineage from spark to datahub'
                url = 'https://datahubproject.io'
                artifacts = [shadowJar, javadocJar, sourcesJar]

                scm {
                    connection = 'scm:git:git://github.com/datahub-project/datahub.git'
                    developerConnection = 'scm:git:ssh://github.com:datahub-project/datahub.git'
                    url = 'https://github.com/datahub-project/datahub.git'
                }

                licenses {
                    license {
                        name = 'The Apache License, Version 2.0'
                        url = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
                    }
                }

                developers {
                    developer {
                        id = 'datahub'
                        name = 'Datahub'
                        email = 'datahub@acryl.io'
                    }
                }
            }
        }
    }

    repositories {
        maven {
            def releasesRepoUrl = "https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/"
            def snapshotsRepoUrl = "https://s01.oss.sonatype.org/content/repositories/snapshots/"
            def ossrhUsername = System.getenv('RELEASE_USERNAME')
            def ossrhPassword = System.getenv('RELEASE_PASSWORD')
            credentials {
                username ossrhUsername
                password ossrhPassword
            }
            url = version.endsWith('SNAPSHOT') ? snapshotsRepoUrl : releasesRepoUrl
        }
    }
}
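// Illustrative usage once a release is out (version placeholder intentional):
//   spark-submit --packages io.acryl:datahub-spark-lineage:<version> ...
// or, from a Gradle build:
//   implementation 'io.acryl:datahub-spark-lineage:<version>'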
// The PGP key is read from the Gradle property 'signingKey' (e.g. passed with
// -PsigningKey=... or set in ~/.gradle/gradle.properties); the passphrase
// comes from the SIGNING_PASSWORD environment variable.
signing {
    def signingKey = findProperty("signingKey")
    def signingPassword = System.getenv("SIGNING_PASSWORD")
    useInMemoryPgpKeys(signingKey, signingPassword)
    sign publishing.publications.shadow
}
nexusStaging {
    serverUrl = "https://s01.oss.sonatype.org/service/local/" // required only for projects registered in Sonatype after 2021-02-24
    username = System.getenv("NEXUS_USERNAME")
    password = System.getenv("NEXUS_PASSWORD")
}
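// Typical release flow (illustrative): './gradlew publish closeAndReleaseRepository',
// where closeAndReleaseRepository is provided by the io.codearte.nexus-staging plugin.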
// Clean up files that the tests scatter outside the build directory. Typed as
// Delete so the paths are removed when the task executes; a plain task body
// would invoke Project.delete at configuration time on every build.
task cleanExtraDirs(type: Delete) {
    delete "$projectDir/derby.log"
    delete "$projectDir/src/test/resources/data/hive"
    delete "$projectDir/src/test/resources/data/out.csv"
    delete "$projectDir/src/test/resources/data/out_persist.csv"
    delete "$projectDir/spark-smoke-test/venv"
}
clean.finalizedBy(cleanExtraDirs)