diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index e463e15243..87fa3c85fc 100644 --- a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -42,8 +42,12 @@ jobs: cache: "pip" - name: Install dependencies run: ./metadata-ingestion/scripts/install_deps.sh + - name: Disk Check + run: df -h . && docker images - name: Remove images run: docker image prune -a -f || true + - name: Disk Check + run: df -h . && docker images - name: Smoke test run: | ./gradlew :metadata-integration:java:spark-lineage:integrationTest \ @@ -54,12 +58,24 @@ jobs: -x :datahub-web-react:yarnBuild \ -x :datahub-web-react:distZip \ -x :datahub-web-react:jar + - name: Store logs + if: failure() + run: | + docker ps -a + docker logs datahub-gms >& gms-${{ matrix.test_strategy }}.log || true + docker logs datahub-actions >& actions-${{ matrix.test_strategy }}.log || true + docker logs broker >& broker-${{ matrix.test_strategy }}.log || true + docker logs mysql >& mysql-${{ matrix.test_strategy }}.log || true + docker logs elasticsearch >& elasticsearch-${{ matrix.test_strategy }}.log || true + docker logs datahub-frontend-react >& frontend-${{ matrix.test_strategy }}.log || true - name: Upload logs uses: actions/upload-artifact@v3 if: failure() with: name: docker logs - path: "docker/build/container-logs/*.log" + path: | + **/build/container-logs/*.log + *.log - uses: actions/upload-artifact@v3 if: always() with: diff --git a/docker/build.gradle b/docker/build.gradle index cc95e12f26..b14739104a 100644 --- a/docker/build.gradle +++ b/docker/build.gradle @@ -8,15 +8,17 @@ import com.avast.gradle.dockercompose.tasks.ComposeDownForced apply from: "../gradle/versioning/versioning.gradle" ext { - quickstart_modules = [ + backend_profile_modules = [ ':docker:elasticsearch-setup', ':docker:mysql-setup', ':docker:kafka-setup', ':datahub-upgrade', + ':metadata-service:war', + ] + quickstart_modules = backend_profile_modules 
+ [ ':metadata-jobs:mce-consumer-job', ':metadata-jobs:mae-consumer-job', - ':metadata-service:war', - ':datahub-frontend', + ':datahub-frontend' ] debug_modules = quickstart_modules - [':metadata-jobs:mce-consumer-job', @@ -90,9 +92,14 @@ dockerCompose { removeVolumes = false } + /** + * Minimal disk-footprint profile used by the Spark integration tests. + * + * Runs only the backend services: no frontend, no MAE/MCE consumer jobs, no other extras. + */ quickstartSlim { isRequiredBy(tasks.named('quickstartSlim')) - composeAdditionalArgs = ['--profile', 'quickstart-consumers'] + composeAdditionalArgs = ['--profile', 'quickstart-backend'] environment.put 'DATAHUB_VERSION', "v${version}" environment.put "DATAHUB_ACTIONS_IMAGE", "acryldata/datahub-ingestion" @@ -132,7 +139,7 @@ tasks.getByName('quickstartComposeUp').dependsOn( tasks.getByName('quickstartPgComposeUp').dependsOn( pg_quickstart_modules.collect { it + ':dockerTag' }) tasks.getByName('quickstartSlimComposeUp').dependsOn( - ([':docker:datahub-ingestion'] + quickstart_modules) + ([':docker:datahub-ingestion'] + backend_profile_modules) .collect { it + ':dockerTag' }) tasks.getByName('quickstartDebugComposeUp').dependsOn( debug_modules.collect { it + ':dockerTagDebug' } diff --git a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java index fa896814d1..a4eb035b0a 100644 --- a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java +++ b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java @@ -136,6 +136,7 @@ public class TestSparkJobsLineage { .respond(HttpResponse.response().withStatusCode(200)); } + @BeforeClass public static void init() { mockServer = startClientAndServer(GMS_PORT); resetBaseExpectations(); @@ -219,8 +220,12 @@ public class TestSparkJobsLineage { @AfterClass public static void tearDown() throws Exception { - spark.stop(); - 
mockServer.stop(); + if (spark != null) { + spark.stop(); + } + if (mockServer != null) { + mockServer.stop(); + } } private static void check(List expected, List actual) {