plugins { id("com.palantir.git-version") apply false } apply plugin: 'java' apply plugin: 'com.github.johnrengelman.shadow' apply plugin: 'signing' apply plugin: 'io.codearte.nexus-staging' apply plugin: 'maven-publish' apply plugin: 'jacoco' apply from: '../versioning.gradle' jar.enabled = false // Since we only want to build shadow jars, disabling the regular jar creation tasks.withType(JavaCompile).configureEach { javaCompiler = javaToolchains.compilerFor { languageVersion = JavaLanguageVersion.of(8) } } tasks.withType(Test).configureEach { javaLauncher = javaToolchains.launcherFor { languageVersion = JavaLanguageVersion.of(8) } } //to rename artifacts for publish project.archivesBaseName = 'datahub-'+project.name //mark implementaion dependencies which needs to excluded along with transitive dependencies from shadowjar //functionality is exactly same as "implementation" configurations { provided implementation.extendsFrom provided } dependencies { //Needed for tie breaking of guava version need for spark and wiremock provided(externalDependency.hadoopMapreduceClient) { force = true } provided(externalDependency.hadoopCommon) { force = true } // required for org.apache.hadoop.util.StopWatch provided(externalDependency.commonsIo) { force = true } // required for org.apache.commons.io.Charsets that is used internally implementation externalDependency.slf4jApi compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok implementation externalDependency.typesafeConfig implementation externalDependency.opentracingJdbc implementation project(path: ':metadata-integration:java:datahub-client', configuration: 'shadow') provided(externalDependency.sparkSql) provided(externalDependency.sparkHive) implementation externalDependency.httpAsyncClient // Tests need a concrete log4j available. 
Providing it here testImplementation 'org.apache.logging.log4j:log4j-api:2.17.1' testImplementation 'org.apache.logging.log4j:log4j-core:2.17.1' testImplementation(externalDependency.postgresql){ exclude group: "com.fasterxml.jackson.core" } testImplementation externalDependency.mockito testImplementation(externalDependency.mockServer){ exclude group: "com.fasterxml.jackson.core" } // older version to allow older guava testImplementation(externalDependency.mockServerClient){ exclude group: "com.fasterxml.jackson.core" } // older version to allow older guava testImplementation(externalDependency.testContainersPostgresql) } task checkShadowJar(type: Exec) { commandLine 'sh', '-c', 'scripts/check_jar.sh' } shadowJar { zip64=true classifier='' mergeServiceFiles() def exclude_modules = project .configurations .provided .resolvedConfiguration .getLenientConfiguration() .getAllModuleDependencies() .collect { it.name } dependencies { exclude(dependency { exclude_modules.contains(it.name) }) } // preventing java multi-release JAR leakage // https://github.com/johnrengelman/shadow/issues/729 exclude('module-info.class', 'META-INF/versions/**') // prevent jni conflict with spark exclude '**/libzstd-jni.*' exclude '**/com_github_luben_zstd_*' relocate 'com.fasterxml.jackson', 'datahub.shaded.jackson' relocate 'org.slf4j','datahub.shaded.org.slf4j' relocate 'org.apache.http','datahub.spark2.shaded.http' relocate 'org.apache.commons.codec', 'datahub.spark2.shaded.o.a.c.codec' relocate 'org.apache.commons.compress', 'datahub.spark2.shaded.o.a.c.compress' relocate 'mozilla', 'datahub.spark2.shaded.mozilla' relocate 'com.typesafe','datahub.spark2.shaded.typesafe' relocate 'io.opentracing','datahub.spark2.shaded.io.opentracing' relocate 'io.netty','datahub.spark2.shaded.io.netty' finalizedBy checkShadowJar } checkShadowJar { dependsOn shadowJar } jacocoTestReport { dependsOn test // tests are required to run before generating the report } test { forkEvery = 1 useJUnit() finalizedBy jacocoTestReport } assemble { dependsOn shadowJar } task integrationTest(type: Exec ) { commandLine "spark-smoke-test/smoke.sh" } task sourcesJar(type: Jar) { classifier 'sources' from sourceSets.main.allJava } task javadocJar(type: Jar, dependsOn: javadoc) { classifier 'javadoc' from javadoc.destinationDir } publishing { publications { shadow(MavenPublication) { publication -> project.shadow.component(publication) pom { name = 'Datahub Spark Lineage' group = 'io.acryl' artifactId = 'datahub-spark-lineage' description = 'Library to push data lineage from spark to datahub' url = 'https://datahubproject.io' artifacts = [ shadowJar, javadocJar, sourcesJar ] scm { connection = 'scm:git:git://github.com/datahub-project/datahub.git' developerConnection = 'scm:git:ssh://github.com:datahub-project/datahub.git' url = 'https://github.com/datahub-project/datahub.git' } licenses { license { name = 'The Apache License, Version 2.0' url = 'http://www.apache.org/licenses/LICENSE-2.0.txt' } } developers { developer { id = 'datahub' name = 'Datahub' email = 'datahub@acryl.io' } } } } } repositories { maven { def releasesRepoUrl = "https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/" def snapshotsRepoUrl = "https://s01.oss.sonatype.org/content/repositories/snapshots/" def ossrhUsername = System.getenv('RELEASE_USERNAME') def ossrhPassword = System.getenv('RELEASE_PASSWORD') credentials { username ossrhUsername password ossrhPassword } url = version.endsWith('SNAPSHOT') ? 
snapshotsRepoUrl : releasesRepoUrl } } } signing { def signingKey = findProperty("signingKey") def signingPassword = System.getenv("SIGNING_PASSWORD") useInMemoryPgpKeys(signingKey, signingPassword) sign publishing.publications.shadow } nexusStaging { serverUrl = "https://s01.oss.sonatype.org/service/local/" //required only for projects registered in Sonatype after 2021-02-24 username = System.getenv("NEXUS_USERNAME") password = System.getenv("NEXUS_PASSWORD") }