Mirror of https://github.com/datahub-project/datahub.git, synced 2025-11-12 01:11:41 +00:00
test(spark-lineage): minor tweaks (#9717)
commit d292b35f23
parent caf6ebe3b7
.github/workflows/spark-smoke-test.yml, 18 changed lines (vendored)
.github/workflows/spark-smoke-test.yml

@@ -42,8 +42,12 @@ jobs:
           cache: "pip"
       - name: Install dependencies
         run: ./metadata-ingestion/scripts/install_deps.sh
+      - name: Disk Check
+        run: df -h . && docker images
       - name: Remove images
         run: docker image prune -a -f || true
+      - name: Disk Check
+        run: df -h . && docker images
       - name: Smoke test
         run: |
           ./gradlew :metadata-integration:java:spark-lineage:integrationTest \
@@ -54,12 +58,24 @@ jobs:
             -x :datahub-web-react:yarnBuild \
             -x :datahub-web-react:distZip \
             -x :datahub-web-react:jar
+      - name: store logs
+        if: failure()
+        run: |
+          docker ps -a
+          docker logs datahub-gms >& gms-${{ matrix.test_strategy }}.log || true
+          docker logs datahub-actions >& actions-${{ matrix.test_strategy }}.log || true
+          docker logs broker >& broker-${{ matrix.test_strategy }}.log || true
+          docker logs mysql >& mysql-${{ matrix.test_strategy }}.log || true
+          docker logs elasticsearch >& elasticsearch-${{ matrix.test_strategy }}.log || true
+          docker logs datahub-frontend-react >& frontend-${{ matrix.test_strategy }}.log || true
       - name: Upload logs
         uses: actions/upload-artifact@v3
         if: failure()
         with:
           name: docker logs
-          path: "docker/build/container-logs/*.log"
+          path: |
+            "**/build/container-logs/*.log"
+            "*.log"
       - uses: actions/upload-artifact@v3
         if: always()
         with:
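The new "store logs" step mirrors each container's output into a per-strategy log file without ever failing the job. A rough Java analogue of the same capture-and-continue pattern, illustrative only and not part of this commit (the class name ContainerLogDump is hypothetical; container names are copied from the step above):

// Dump `docker logs <name>` to <name>.log per container, swallowing failures,
// like `docker logs X >& X.log || true` in the workflow step.
import java.io.File;
import java.io.IOException;
import java.util.List;

public class ContainerLogDump {
  public static void main(String[] args) {
    List<String> containers = List.of(
        "datahub-gms", "datahub-actions", "broker",
        "mysql", "elasticsearch", "datahub-frontend-react");
    for (String name : containers) {
      try {
        new ProcessBuilder("docker", "logs", name)
            .redirectErrorStream(true)               // like >&: merge stderr into stdout
            .redirectOutput(new File(name + ".log")) // write everything to <name>.log
            .start()
            .waitFor();
      } catch (IOException | InterruptedException e) {
        // like `|| true`: a missing container must not fail the whole step
        if (e instanceof InterruptedException) {
          Thread.currentThread().interrupt();
        }
        System.err.println("Could not capture logs for " + name + ": " + e.getMessage());
      }
    }
  }
}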
@@ -8,15 +8,17 @@ import com.avast.gradle.dockercompose.tasks.ComposeDownForced
 apply from: "../gradle/versioning/versioning.gradle"
 
 ext {
-  quickstart_modules = [
+  backend_profile_modules = [
     ':docker:elasticsearch-setup',
     ':docker:mysql-setup',
     ':docker:kafka-setup',
     ':datahub-upgrade',
+    ':metadata-service:war',
+  ]
+  quickstart_modules = backend_profile_modules + [
     ':metadata-jobs:mce-consumer-job',
     ':metadata-jobs:mae-consumer-job',
-    ':metadata-service:war',
-    ':datahub-frontend'
+    ':datahub-frontend',
   ]
 
   debug_modules = quickstart_modules - [':metadata-jobs:mce-consumer-job',
@@ -90,9 +92,14 @@ dockerCompose {
     removeVolumes = false
   }
 
+  /**
+   * The smallest disk footprint required for Spark integration tests
+   *
+   * No frontend, mae, mce, or other services
+   */
   quickstartSlim {
     isRequiredBy(tasks.named('quickstartSlim'))
-    composeAdditionalArgs = ['--profile', 'quickstart-consumers']
+    composeAdditionalArgs = ['--profile', 'quickstart-backend']
 
     environment.put 'DATAHUB_VERSION', "v${version}"
     environment.put "DATAHUB_ACTIONS_IMAGE", "acryldata/datahub-ingestion"
@@ -132,7 +139,7 @@ tasks.getByName('quickstartComposeUp').dependsOn(
 tasks.getByName('quickstartPgComposeUp').dependsOn(
   pg_quickstart_modules.collect { it + ':dockerTag' })
 tasks.getByName('quickstartSlimComposeUp').dependsOn(
-  ([':docker:datahub-ingestion'] + quickstart_modules)
+  ([':docker:datahub-ingestion'] + backend_profile_modules)
   .collect { it + ':dockerTag' })
 tasks.getByName('quickstartDebugComposeUp').dependsOn(
   debug_modules.collect { it + ':dockerTagDebug' }
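Groovy list arithmetic does the module bookkeeping here: `+` concatenates into a new list, so quickstart_modules now extends the shared backend_profile_modules base, and the slim Spark profile tags only the backend images. A rough Java sketch of the same composition, illustrative only (the class name QuickstartModules is hypothetical; module strings are copied from the diff above):

// Shared backend base list, plus the extra modules only quickstart needs.
import java.util.ArrayList;
import java.util.List;

public class QuickstartModules {
  public static void main(String[] args) {
    List<String> backendProfileModules = List.of(
        ":docker:elasticsearch-setup",
        ":docker:mysql-setup",
        ":docker:kafka-setup",
        ":datahub-upgrade",
        ":metadata-service:war");

    // Groovy's `backend_profile_modules + [...]` returns a new list;
    // neither operand is mutated.
    List<String> quickstartModules = new ArrayList<>(backendProfileModules);
    quickstartModules.addAll(List.of(
        ":metadata-jobs:mce-consumer-job",
        ":metadata-jobs:mae-consumer-job",
        ":datahub-frontend"));

    // quickstartSlimComposeUp now depends only on the backend image tags:
    backendProfileModules.forEach(m -> System.out.println(m + ":dockerTag"));
  }
}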
@@ -136,6 +136,7 @@ public class TestSparkJobsLineage {
         .respond(HttpResponse.response().withStatusCode(200));
   }
 
+  @BeforeClass
   public static void init() {
     mockServer = startClientAndServer(GMS_PORT);
     resetBaseExpectations();
@@ -219,8 +220,12 @@ public class TestSparkJobsLineage {
 
   @AfterClass
   public static void tearDown() throws Exception {
-    spark.stop();
-    mockServer.stop();
+    if (spark != null) {
+      spark.stop();
+    }
+    if (mockServer != null) {
+      mockServer.stop();
+    }
   }
 
   private static void check(List<DatasetLineage> expected, List<DatasetLineage> actual) {
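The null guards matter because JUnit 4 runs @AfterClass even when @BeforeClass fails part-way through, so spark or mockServer may still be null; calling stop() unguarded would then throw a NullPointerException that masks the original failure. A minimal JUnit 4 sketch of the pattern, illustrative only (NullSafeTeardownExample and its resource are hypothetical, not from the DataHub test):

// Null-guarded class-level teardown: safe even if init() never completed.
import org.junit.AfterClass;
import org.junit.BeforeClass;

public class NullSafeTeardownExample {
  private static AutoCloseable resource;

  @BeforeClass
  public static void init() throws Exception {
    // If this throws before the assignment, the field stays null,
    // but JUnit still invokes the @AfterClass method below.
    resource = openResource();
  }

  @AfterClass
  public static void tearDown() throws Exception {
    // Guard against a partially failed init(): closing a null field
    // would replace the real failure with a NullPointerException.
    if (resource != null) {
      resource.close();
    }
  }

  private static AutoCloseable openResource() {
    return () -> {}; // stand-in for an expensive resource like a Spark session
  }
}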