test(spark-lineage): minor tweaks (#9717)

This commit is contained in:
david-leifker 2024-01-25 12:41:51 -06:00 committed by GitHub
parent caf6ebe3b7
commit d292b35f23
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 36 additions and 8 deletions

View File

@@ -42,8 +42,12 @@ jobs:
cache: "pip"
- name: Install dependencies
run: ./metadata-ingestion/scripts/install_deps.sh
- name: Disk Check
run: df -h . && docker images
- name: Remove images
run: docker image prune -a -f || true
- name: Disk Check
run: df -h . && docker images
- name: Smoke test
run: |
./gradlew :metadata-integration:java:spark-lineage:integrationTest \
@@ -54,12 +58,24 @@ jobs:
-x :datahub-web-react:yarnBuild \
-x :datahub-web-react:distZip \
-x :datahub-web-react:jar
- name: store logs
if: failure()
run: |
docker ps -a
docker logs datahub-gms >& gms-${{ matrix.test_strategy }}.log || true
docker logs datahub-actions >& actions-${{ matrix.test_strategy }}.log || true
docker logs broker >& broker-${{ matrix.test_strategy }}.log || true
docker logs mysql >& mysql-${{ matrix.test_strategy }}.log || true
docker logs elasticsearch >& elasticsearch-${{ matrix.test_strategy }}.log || true
docker logs datahub-frontend-react >& frontend-${{ matrix.test_strategy }}.log || true
- name: Upload logs
uses: actions/upload-artifact@v3
if: failure()
with:
name: docker logs
path: "docker/build/container-logs/*.log"
path: |
"**/build/container-logs/*.log"
"*.log"
- uses: actions/upload-artifact@v3
if: always()
with:

View File

@@ -8,15 +8,17 @@ import com.avast.gradle.dockercompose.tasks.ComposeDownForced
apply from: "../gradle/versioning/versioning.gradle"
ext {
quickstart_modules = [
backend_profile_modules = [
':docker:elasticsearch-setup',
':docker:mysql-setup',
':docker:kafka-setup',
':datahub-upgrade',
':metadata-service:war',
]
quickstart_modules = backend_profile_modules + [
':metadata-jobs:mce-consumer-job',
':metadata-jobs:mae-consumer-job',
':metadata-service:war',
':datahub-frontend',
':datahub-frontend'
]
debug_modules = quickstart_modules - [':metadata-jobs:mce-consumer-job',
@@ -90,9 +92,14 @@ dockerCompose {
removeVolumes = false
}
/**
* The smallest disk footprint required for Spark integration tests
*
* No frontend, mae, mce, or other services
*/
quickstartSlim {
isRequiredBy(tasks.named('quickstartSlim'))
composeAdditionalArgs = ['--profile', 'quickstart-consumers']
composeAdditionalArgs = ['--profile', 'quickstart-backend']
environment.put 'DATAHUB_VERSION', "v${version}"
environment.put "DATAHUB_ACTIONS_IMAGE", "acryldata/datahub-ingestion"
@@ -132,7 +139,7 @@ tasks.getByName('quickstartComposeUp').dependsOn(
tasks.getByName('quickstartPgComposeUp').dependsOn(
pg_quickstart_modules.collect { it + ':dockerTag' })
tasks.getByName('quickstartSlimComposeUp').dependsOn(
([':docker:datahub-ingestion'] + quickstart_modules)
([':docker:datahub-ingestion'] + backend_profile_modules)
.collect { it + ':dockerTag' })
tasks.getByName('quickstartDebugComposeUp').dependsOn(
debug_modules.collect { it + ':dockerTagDebug' }

View File

@@ -136,6 +136,7 @@ public class TestSparkJobsLineage {
.respond(HttpResponse.response().withStatusCode(200));
}
@BeforeClass
public static void init() {
mockServer = startClientAndServer(GMS_PORT);
resetBaseExpectations();
@@ -219,8 +220,12 @@ public class TestSparkJobsLineage {
@AfterClass
public static void tearDown() throws Exception {
spark.stop();
mockServer.stop();
if (spark != null) {
spark.stop();
}
if (mockServer != null) {
mockServer.stop();
}
}
private static void check(List<DatasetLineage> expected, List<DatasetLineage> actual) {