Mirror of https://github.com/datahub-project/datahub.git
Synced 2025-06-27 05:03:31 +00:00

CI speedup (#13057)

This commit is contained in:
parent 4e48e098dc
commit cf40116680

.github/workflows/docker-unified.yml (vendored): 581 lines changed
@@ -33,13 +33,17 @@ env:
   DATAHUB_INGESTION_BASE_IMAGE: "acryldata/datahub-ingestion-base"
   DATAHUB_INGESTION_IMAGE: "acryldata/datahub-ingestion"

+  DOCKER_CACHE: "DEPOT"
+  DEPOT_PROJECT_ID: ${{ vars.DEPOT_PROJECT_ID }}
+  DEPOT_TOKEN: ${{ secrets.DEPOT_TOKEN }}
+
 permissions:
   contents: read
   id-token: write

 jobs:
   setup:
-    runs-on: ubuntu-latest
+    runs-on: depot-ubuntu-24.04-small
     outputs:
       tag: ${{ steps.tag.outputs.tag }}
       slim_tag: ${{ steps.tag.outputs.slim_tag }}
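The three new env entries introduce a workflow-wide toggle for Depot-backed Docker builds. Later hunks consume the toggle with a guarded setup step; the pattern, taken directly from the job bodies below, looks like this:

    steps:
      # Install the Depot CLI only when the workflow-level toggle selects Depot;
      # with DOCKER_CACHE set to anything else, the step is skipped entirely.
      - name: Set up Depot CLI
        if: ${{ env.DOCKER_CACHE == 'DEPOT' }}
        uses: depot/setup-action@v1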
@@ -116,54 +120,106 @@ jobs:
           echo "publish=${{ env.ENABLE_PUBLISH }}" >> "$GITHUB_OUTPUT"
       - uses: ./.github/actions/ci-optimization
         id: ci-optimize

+  smoke_test_lint:
+    name: Lint on smoke tests
+    runs-on: depot-ubuntu-24.04
+    needs: setup
+    if: ${{ needs.setup.outputs.smoke_test_change == 'true' }}
+    steps:
+      - name: Check out the repo
+        uses: acryldata/sane-checkout-action@v3
+
       - uses: actions/setup-python@v5
-        if: ${{ steps.ci-optimize.outputs.smoke-test-change == 'true' }}
         with:
           python-version: "3.10"
           cache: "pip"

       - uses: actions/cache@v4
-        if: ${{ steps.ci-optimize.outputs.smoke-test-change == 'true' }}
         with:
           path: |
             ~/.cache/uv
           key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements.txt') }}
-      - name: Set up JDK 17
-        uses: actions/setup-java@v4
-        if: ${{ steps.ci-optimize.outputs.smoke-test-change == 'true' }}
+      - uses: actions/cache@v4
         with:
-          distribution: "zulu"
-          java-version: 17
-      - uses: gradle/actions/setup-gradle@v4
+          path: |
+            ~/.cache/yarn
+          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}

       - name: Run lint on smoke test
-        if: ${{ steps.ci-optimize.outputs.smoke-test-change == 'true' }}
         run: |
           python ./.github/scripts/check_python_package.py
           ./gradlew :smoke-test:pythonLint
           ./gradlew :smoke-test:cypressLint

-  gms_build:
-    name: Build and Push DataHub GMS Docker Image
-    runs-on: ubuntu-latest
+  base_build:
+    name: Prepare all images
+    runs-on: depot-ubuntu-24.04-4
     needs: setup
-    if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }}
     steps:
-      - name: Free up disk space
-        run: |
-          sudo apt-get remove 'dotnet-*' azure-cli || true
-          sudo rm -rf /usr/local/lib/android/ || true
-          sudo docker image prune -a -f || true
       - name: Set up JDK 17
         uses: actions/setup-java@v4
         with:
           distribution: "zulu"
           java-version: 17
-      - uses: gradle/actions/setup-gradle@v4

+      #- uses: gradle/actions/setup-gradle@v4
+      - uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cache/uv
+          key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements.txt') }}
+
+      - uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cache/yarn
+          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
+
+      - name: Set up Depot CLI
+        if: ${{ env.DOCKER_CACHE == 'DEPOT' }}
+        uses: depot/setup-action@v1
+
       - name: Check out the repo
         uses: acryldata/sane-checkout-action@v3
-      - name: Pre-build artifacts for docker image
+      - uses: actions/setup-python@v5
+        with:
+          python-version: "3.10"
+          cache: "pip"
+
+      - name: Build all Docker Contexts
         run: |
-          ./gradlew :metadata-service:war:build -x test --parallel
-          mv ./metadata-service/war/build/libs/war.war .
+          ./gradlew :docker:PrepareAllQuickStartConsumers

+      - uses: actions/cache/save@v4
+        with:
+          path: ${{ github.workspace }}/build/dockerBuildContext/
+          key: ${{ runner.os }}-docker-${{ github.sha }}
+
+  gms_build:
+    name: Build and Push DataHub GMS Docker Image
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, base_build]
+    if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }}
+    steps:
+      - name: Check out the repo
+        uses: acryldata/sane-checkout-action@v3
+
+      - name: Set up Depot CLI
+        if: ${{ env.DOCKER_CACHE == 'DEPOT' }}
+        uses: depot/setup-action@v1
+
+      - uses: actions/cache/restore@v4
+        with:
+          path: ${{ github.workspace }}/build/dockerBuildContext/
+          key: ${{ runner.os }}-docker-${{ github.sha }}
+
       - name: Build and push
         uses: ./.github/actions/docker-custom-build-and-push
         with:
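The new base_build job prepares every Docker build context once, then publishes the directory to downstream jobs through an actions/cache save/restore pair keyed on the commit SHA, so each per-image job gets an exact-match hit. A minimal sketch of the handshake (the producer/consumer job names here are illustrative):

    producer:   # plays the role of base_build
      steps:
        - uses: actions/cache/save@v4
          with:
            path: ${{ github.workspace }}/build/dockerBuildContext/
            key: ${{ runner.os }}-docker-${{ github.sha }}

    consumer:   # plays the role of gms_build and the other image jobs
      needs: producer
      steps:
        - uses: actions/cache/restore@v4   # same key => exact-match restore
          with:
            path: ${{ github.workspace }}/build/dockerBuildContext/
            key: ${{ runner.os }}-docker-${{ github.sha }}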
@@ -173,7 +229,7 @@ jobs:
           username: ${{ secrets.ACRYL_DOCKER_USERNAME }}
           password: ${{ secrets.ACRYL_DOCKER_PASSWORD }}
           publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
-          context: .
+          context: ./build/dockerBuildContext/metadata-service/war/docker
           file: ./docker/datahub-gms/Dockerfile
           platforms: linux/amd64,linux/arm64/v8
   gms_scan:
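Switching `context:` from the repository root to the prepared directory means only the staged files are shipped to the builder. Assuming the custom docker-custom-build-and-push action forwards `context`, `file`, and `platforms` to a buildx invocation (the action itself is not shown in this diff), the rough CLI equivalent would be:

      - name: Build GMS image (illustrative CLI equivalent, not the actual action)
        run: |
          # The small prepared directory is the build context, not the whole repo.
          docker buildx build \
            --file ./docker/datahub-gms/Dockerfile \
            --platform linux/amd64,linux/arm64/v8 \
            ./build/dockerBuildContext/metadata-service/war/docker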
@@ -182,7 +238,7 @@ jobs:
       security-events: write # for github/codeql-action/upload-sarif to upload SARIF results
       actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
     name: "[Monitoring] Scan GMS images for vulnerabilities"
-    runs-on: ubuntu-latest
+    runs-on: depot-ubuntu-24.04
     needs: [setup, gms_build]
     if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }}
     steps:
@@ -214,27 +270,22 @@ jobs:

   mae_consumer_build:
     name: Build and Push DataHub MAE Consumer Docker Image
-    runs-on: ubuntu-latest
-    needs: setup
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, smoke_test_lint, base_build]
     if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }}
     steps:
-      - name: Free up disk space
-        run: |
-          sudo apt-get remove 'dotnet-*' azure-cli || true
-          sudo rm -rf /usr/local/lib/android/ || true
-          sudo docker image prune -a -f || true
-      - name: Set up JDK 17
-        uses: actions/setup-java@v4
-        with:
-          distribution: "zulu"
-          java-version: 17
-      - uses: gradle/actions/setup-gradle@v4
       - name: Check out the repo
         uses: acryldata/sane-checkout-action@v3
-      - name: Pre-build artifacts for docker image
-        run: |
-          ./gradlew :metadata-jobs:mae-consumer-job:build -x test --parallel
-          mv ./metadata-jobs/mae-consumer-job/build/libs/mae-consumer-job.jar .
+      - name: Set up Depot CLI
+        if: ${{ env.DOCKER_CACHE == 'DEPOT' }}
+        uses: depot/setup-action@v1
+
+      - uses: actions/cache/restore@v4
+        with:
+          path: ${{ github.workspace }}/build/dockerBuildContext/
+          key: ${{ runner.os }}-docker-${{ github.sha }}
+
       - name: Build and push
         uses: ./.github/actions/docker-custom-build-and-push
         with:
@@ -244,13 +295,13 @@ jobs:
           username: ${{ secrets.ACRYL_DOCKER_USERNAME }}
           password: ${{ secrets.ACRYL_DOCKER_PASSWORD }}
           publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
-          context: .
+          context: ./build/dockerBuildContext/metadata-jobs/mae-consumer-job/docker
           file: ./docker/datahub-mae-consumer/Dockerfile
           platforms: linux/amd64,linux/arm64/v8
   mae_consumer_scan:
     name: "[Monitoring] Scan MAE consumer images for vulnerabilities"
-    runs-on: ubuntu-latest
-    needs: [setup, mae_consumer_build]
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, smoke_test_lint, mae_consumer_build]
     if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }}
     permissions:
       contents: read # for actions/checkout to fetch code
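Adding smoke_test_lint to the scan jobs' `needs` gates them on the lint job as well. Note the GitHub Actions semantics this relies on: a job with several `needs` entries starts only after all of them succeed, and if a needed job is skipped, dependents are skipped too unless their `if:` uses always() or !cancelled(). An illustrative skeleton (the job name is hypothetical):

    example_scan:
      needs: [setup, smoke_test_lint, mae_consumer_build]   # waits on all three
      if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }}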
@@ -285,27 +336,21 @@ jobs:

   mce_consumer_build:
     name: Build and Push DataHub MCE Consumer Docker Image
-    runs-on: ubuntu-latest
-    needs: setup
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, base_build]
     if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }}
     steps:
-      - name: Free up disk space
-        run: |
-          sudo apt-get remove 'dotnet-*' azure-cli || true
-          sudo rm -rf /usr/local/lib/android/ || true
-          sudo docker image prune -a -f || true
-      - name: Set up JDK 17
-        uses: actions/setup-java@v4
-        with:
-          distribution: "zulu"
-          java-version: 17
-      - uses: gradle/actions/setup-gradle@v4
       - name: Check out the repo
         uses: acryldata/sane-checkout-action@v3
-      - name: Pre-build artifacts for docker image
-        run: |
-          ./gradlew :metadata-jobs:mce-consumer-job:build -x test --parallel
-          mv ./metadata-jobs/mce-consumer-job/build/libs/mce-consumer-job.jar .
+      - name: Set up Depot CLI
+        if: ${{ env.DOCKER_CACHE == 'DEPOT' }}
+        uses: depot/setup-action@v1
+
+      - uses: actions/cache/restore@v4
+        with:
+          path: ${{ github.workspace }}/build/dockerBuildContext/
+          key: ${{ runner.os }}-docker-${{ github.sha }}
       - name: Build and push
         uses: ./.github/actions/docker-custom-build-and-push
         with:
@@ -315,13 +360,13 @@ jobs:
           username: ${{ secrets.ACRYL_DOCKER_USERNAME }}
           password: ${{ secrets.ACRYL_DOCKER_PASSWORD }}
           publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
-          context: .
+          context: ./build/dockerBuildContext/metadata-jobs/mce-consumer-job/docker
           file: ./docker/datahub-mce-consumer/Dockerfile
           platforms: linux/amd64,linux/arm64/v8
   mce_consumer_scan:
     name: "[Monitoring] Scan MCE consumer images for vulnerabilities"
-    runs-on: ubuntu-latest
-    needs: [setup, mce_consumer_build]
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, smoke_test_lint, mce_consumer_build]
     if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }}
     permissions:
       contents: read # for actions/checkout to fetch code
@@ -356,27 +401,22 @@ jobs:

   datahub_upgrade_build:
     name: Build and Push DataHub Upgrade Docker Image
-    runs-on: ubuntu-latest
-    needs: setup
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, base_build]
     if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }}
     steps:
-      - name: Free up disk space
-        run: |
-          sudo apt-get remove 'dotnet-*' azure-cli || true
-          sudo rm -rf /usr/local/lib/android/ || true
-          sudo docker image prune -a -f || true
-      - name: Set up JDK 17
-        uses: actions/setup-java@v4
-        with:
-          distribution: "zulu"
-          java-version: 17
-      - uses: gradle/actions/setup-gradle@v4
       - name: Check out the repo
         uses: acryldata/sane-checkout-action@v3
-      - name: Pre-build artifacts for docker image
-        run: |
-          ./gradlew :datahub-upgrade:build -x test --parallel
-          mv ./datahub-upgrade/build/libs/datahub-upgrade.jar .
+      - name: Set up Depot CLI
+        if: ${{ env.DOCKER_CACHE == 'DEPOT' }}
+        uses: depot/setup-action@v1
+
+      - uses: actions/cache/restore@v4
+        with:
+          path: ${{ github.workspace }}/build/dockerBuildContext/
+          key: ${{ runner.os }}-docker-${{ github.sha }}
+
       - name: Build and push
         uses: ./.github/actions/docker-custom-build-and-push
         with:
@@ -386,13 +426,13 @@ jobs:
           username: ${{ secrets.ACRYL_DOCKER_USERNAME }}
           password: ${{ secrets.ACRYL_DOCKER_PASSWORD }}
           publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
-          context: .
+          context: ./build/dockerBuildContext/datahub-upgrade/docker
           file: ./docker/datahub-upgrade/Dockerfile
           platforms: linux/amd64,linux/arm64/v8
   datahub_upgrade_scan:
     name: "[Monitoring] Scan DataHub Upgrade images for vulnerabilities"
-    runs-on: ubuntu-latest
-    needs: [setup, datahub_upgrade_build]
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, smoke_test_lint, datahub_upgrade_build]
     if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }}
     permissions:
       contents: read # for actions/checkout to fetch code
@@ -427,27 +467,22 @@ jobs:

   frontend_build:
     name: Build and Push DataHub Frontend Docker Image
-    runs-on: ubuntu-latest
-    needs: setup
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, base_build]
     if: ${{ needs.setup.outputs.frontend_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true'}}
     steps:
-      - name: Free up disk space
-        run: |
-          sudo apt-get remove 'dotnet-*' azure-cli || true
-          sudo rm -rf /usr/local/lib/android/ || true
-          sudo docker image prune -a -f || true
-      - name: Set up JDK 17
-        uses: actions/setup-java@v4
-        with:
-          distribution: "zulu"
-          java-version: 17
-      - uses: gradle/actions/setup-gradle@v4
       - name: Check out the repo
         uses: acryldata/sane-checkout-action@v3
-      - name: Pre-build artifacts for docker image
-        run: |
-          ./gradlew :datahub-frontend:dist -x test -x yarnTest -x yarnLint --parallel
-          mv ./datahub-frontend/build/stage/main .
+      - name: Set up Depot CLI
+        if: ${{ env.DOCKER_CACHE == 'DEPOT' }}
+        uses: depot/setup-action@v1
+
+      - uses: actions/cache/restore@v4
+        with:
+          path: ${{ github.workspace }}/build/dockerBuildContext/
+          key: ${{ runner.os }}-docker-${{ github.sha }}
+
       - name: Build and push
         uses: ./.github/actions/docker-custom-build-and-push
         with:
@@ -457,13 +492,13 @@ jobs:
           username: ${{ secrets.ACRYL_DOCKER_USERNAME }}
           password: ${{ secrets.ACRYL_DOCKER_PASSWORD }}
           publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
-          context: .
+          context: ./build/dockerBuildContext/datahub-frontend/docker
           file: ./docker/datahub-frontend/Dockerfile
           platforms: linux/amd64,linux/arm64/v8
   frontend_scan:
     name: "[Monitoring] Scan Frontend images for vulnerabilities"
-    runs-on: ubuntu-latest
-    needs: [setup, frontend_build]
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, smoke_test_lint, frontend_build]
     if: ${{ needs.setup.outputs.frontend_change == 'true' || needs.setup.outputs.publish == 'true' }}
     permissions:
       contents: read # for actions/checkout to fetch code
@@ -498,17 +533,22 @@ jobs:

   kafka_setup_build:
     name: Build and Push DataHub Kafka Setup Docker Image
-    runs-on: ubuntu-latest
-    needs: setup
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, base_build]
     if: ${{ needs.setup.outputs.kafka_setup_change == 'true' || (needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true') }}
     steps:
-      - name: Free up disk space
-        run: |
-          sudo apt-get remove 'dotnet-*' azure-cli || true
-          sudo rm -rf /usr/local/lib/android/ || true
-          sudo docker image prune -a -f || true
       - name: Check out the repo
         uses: acryldata/sane-checkout-action@v3
+
+      - name: Set up Depot CLI
+        if: ${{ env.DOCKER_CACHE == 'DEPOT' }}
+        uses: depot/setup-action@v1
+
+      - uses: actions/cache/restore@v4
+        with:
+          path: ${{ github.workspace }}/build/dockerBuildContext/
+          key: ${{ runner.os }}-docker-${{ github.sha }}
+
       - name: Build and push
         uses: ./.github/actions/docker-custom-build-and-push
         with:
@@ -518,7 +558,7 @@ jobs:
           username: ${{ secrets.ACRYL_DOCKER_USERNAME }}
           password: ${{ secrets.ACRYL_DOCKER_PASSWORD }}
           publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
-          context: .
+          context: ./build/dockerBuildContext/docker/kafka-setup/docker
           file: ./docker/kafka-setup/Dockerfile
           platforms: linux/amd64,linux/arm64/v8
   kafka_setup_scan:
@@ -527,8 +567,8 @@ jobs:
       security-events: write # for github/codeql-action/upload-sarif to upload SARIF results
       actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
     name: "[Monitoring] Scan Kafka Setup images for vulnerabilities"
-    runs-on: ubuntu-latest
-    needs: [ setup, kafka_setup_build ]
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, smoke_test_lint, kafka_setup_build]
     if: ${{ needs.setup.outputs.kafka_setup_change == 'true' || (needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true') }}
     steps:
       - name: Checkout # adding checkout step just to make trivy upload happy
@@ -559,17 +599,22 @@ jobs:

   mysql_setup_build:
     name: Build and Push DataHub MySQL Setup Docker Image
-    runs-on: ubuntu-latest
-    needs: setup
-    if: ${{ needs.setup.outputs.mysql_setup_change == 'true' || (needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true') }}
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, base_build]
+    if: ${{ false || needs.setup.outputs.mysql_setup_change == 'true' || (needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true') }}
     steps:
-      - name: Free up disk space
-        run: |
-          sudo apt-get remove 'dotnet-*' azure-cli || true
-          sudo rm -rf /usr/local/lib/android/ || true
-          sudo docker image prune -a -f || true
       - name: Check out the repo
         uses: acryldata/sane-checkout-action@v3
+
+      - name: Set up Depot CLI
+        if: ${{ env.DOCKER_CACHE == 'DEPOT' }}
+        uses: depot/setup-action@v1
+
+      - uses: actions/cache/restore@v4
+        with:
+          path: ${{ github.workspace }}/build/dockerBuildContext/
+          key: ${{ runner.os }}-docker-${{ github.sha }}
+
       - name: Build and push
         uses: ./.github/actions/docker-custom-build-and-push
         with:
@@ -579,7 +624,7 @@ jobs:
           username: ${{ secrets.ACRYL_DOCKER_USERNAME }}
           password: ${{ secrets.ACRYL_DOCKER_PASSWORD }}
           publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
-          context: .
+          context: ./build/dockerBuildContext/docker/mysql-setup/docker
           file: ./docker/mysql-setup/Dockerfile
           platforms: linux/amd64,linux/arm64/v8
   mysql_setup_scan:
@@ -588,8 +633,8 @@ jobs:
       security-events: write # for github/codeql-action/upload-sarif to upload SARIF results
       actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
     name: "[Monitoring] Scan MySQL Setup images for vulnerabilities"
-    runs-on: ubuntu-latest
-    needs: [ setup, mysql_setup_build ]
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, smoke_test_lint, mysql_setup_build]
     if: ${{ needs.setup.outputs.mysql_setup_change == 'true' || (needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true') }}
     steps:
       - name: Checkout # adding checkout step just to make trivy upload happy
@@ -620,17 +665,22 @@ jobs:

   elasticsearch_setup_build:
     name: Build and Push DataHub Elasticsearch Setup Docker Image
-    runs-on: ubuntu-latest
-    needs: setup
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, base_build]
     if: ${{ needs.setup.outputs.elasticsearch_setup_change == 'true' || (needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' ) }}
     steps:
-      - name: Free up disk space
-        run: |
-          sudo apt-get remove 'dotnet-*' azure-cli || true
-          sudo rm -rf /usr/local/lib/android/ || true
-          sudo docker image prune -a -f || true
       - name: Check out the repo
         uses: acryldata/sane-checkout-action@v3
+
+      - name: Set up Depot CLI
+        if: ${{ env.DOCKER_CACHE == 'DEPOT' }}
+        uses: depot/setup-action@v1
+
+      - uses: actions/cache/restore@v4
+        with:
+          path: ${{ github.workspace }}/build/dockerBuildContext/
+          key: ${{ runner.os }}-docker-${{ github.sha }}
+
       - name: Build and push
         uses: ./.github/actions/docker-custom-build-and-push
         with:
@@ -640,7 +690,7 @@ jobs:
           username: ${{ secrets.ACRYL_DOCKER_USERNAME }}
           password: ${{ secrets.ACRYL_DOCKER_PASSWORD }}
           publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
-          context: .
+          context: ./build/dockerBuildContext/docker/elasticsearch-setup/docker
           file: ./docker/elasticsearch-setup/Dockerfile
           platforms: linux/amd64,linux/arm64/v8
   elasticsearch_setup_scan:
@@ -649,7 +699,7 @@ jobs:
       security-events: write # for github/codeql-action/upload-sarif to upload SARIF results
       actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
     name: "[Monitoring] Scan ElasticSearch setup images for vulnerabilities"
-    runs-on: ubuntu-latest
+    runs-on: depot-ubuntu-24.04
     needs: [ setup, elasticsearch_setup_build ]
     if: ${{ needs.setup.outputs.elasticsearch_setup_change == 'true' || (needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' ) }}
     steps:
@@ -681,19 +731,24 @@ jobs:

   datahub_ingestion_base_build:
     name: Build and Push DataHub Ingestion (Base) Docker Image
-    runs-on: ubuntu-latest
+    runs-on: depot-ubuntu-24.04
     outputs:
       tag: ${{ steps.tag.outputs.tag }}
     needs: setup
     if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
     steps:
-      - name: Free up disk space
-        run: |
-          sudo apt-get remove 'dotnet-*' azure-cli || true
-          sudo rm -rf /usr/local/lib/android/ || true
-          sudo docker image prune -a -f || true
       - name: Check out the repo
         uses: acryldata/sane-checkout-action@v3
+
+      - name: Set up Depot CLI
+        if: ${{ env.DOCKER_CACHE == 'DEPOT' }}
+        uses: depot/setup-action@v1
+
+      - uses: actions/cache/restore@v4
+        with:
+          path: ${{ github.workspace }}/build/dockerBuildContext/
+          key: ${{ runner.os }}-docker-${{ github.sha }}
+
       - name: Build and push Base Image
         if: ${{ needs.setup.outputs.ingestion_base_change == 'true' }}
         uses: ./.github/actions/docker-custom-build-and-push
@@ -714,10 +769,10 @@ jobs:
       run: echo "tag=${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_tag || 'head' }}" >> "$GITHUB_OUTPUT"
   datahub_ingestion_base_slim_build:
     name: Build and Push DataHub Ingestion (Base-Slim) Docker Image
-    runs-on: ubuntu-latest
+    runs-on: depot-ubuntu-24.04
     outputs:
       tag: ${{ steps.tag.outputs.tag }}
-    needs: [setup, datahub_ingestion_base_build]
+    needs: [setup, smoke_test_lint, datahub_ingestion_base_build]
     if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
     steps:
       - name: Free up disk space
@@ -762,17 +817,12 @@ jobs:
       run: echo "tag=${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_slim_tag || 'head-slim' }}" >> "$GITHUB_OUTPUT"
   datahub_ingestion_base_full_build:
     name: Build and Push DataHub Ingestion (Base-Full) Docker Image
-    runs-on: ubuntu-latest
+    runs-on: depot-ubuntu-24.04
     outputs:
       tag: ${{ steps.tag.outputs.tag }}
-    needs: [setup, datahub_ingestion_base_build]
+    needs: [setup, smoke_test_lint, datahub_ingestion_base_build]
     if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
     steps:
-      - name: Free up disk space
-        run: |
-          sudo apt-get remove 'dotnet-*' azure-cli || true
-          sudo rm -rf /usr/local/lib/android/ || true
-          sudo docker image prune -a -f || true
       - name: Check out the repo
         uses: acryldata/sane-checkout-action@v3
       - name: Download Base Image
@@ -809,18 +859,13 @@ jobs:

   datahub_ingestion_slim_build:
     name: Build and Push DataHub Ingestion Docker Images
-    runs-on: ubuntu-latest
+    runs-on: depot-ubuntu-24.04
    outputs:
       tag: ${{ steps.tag.outputs.tag }}
       needs_artifact_download: ${{ needs.setup.outputs.ingestion_change == 'true' && ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true') }}
-    needs: [setup, datahub_ingestion_base_slim_build]
+    needs: [setup, smoke_test_lint, datahub_ingestion_base_slim_build]
     if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
     steps:
-      - name: Free up disk space
-        run: |
-          sudo apt-get remove 'dotnet-*' azure-cli || true
-          sudo rm -rf /usr/local/lib/android/ || true
-          sudo docker image prune -a -f || true
       - name: Check out the repo
         uses: acryldata/sane-checkout-action@v3
       - uses: actions/setup-python@v5
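The slim build keeps exposing needs_artifact_download so a downstream job can tell whether the image exists only as a workflow artifact (built, but not pushed to a registry). The old smoke_test job consumed it like this (this step is removed later in the diff, reproduced here for reference):

      - name: Download datahub-ingestion-slim image
        uses: ishworkh/container-image-artifact-download@v2.0.0
        if: ${{ needs.datahub_ingestion_slim_build.outputs.needs_artifact_download == 'true' }}
        with:
          image: ${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.datahub_ingestion_slim_build.outputs.tag }}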
@@ -832,7 +877,6 @@ jobs:
         with:
           distribution: "zulu"
           java-version: 17
-      - uses: gradle/actions/setup-gradle@v4
       - name: Build codegen
         if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish =='true' }}
         run: ./gradlew :metadata-ingestion:codegen
@@ -878,8 +922,8 @@ jobs:
       security-events: write # for github/codeql-action/upload-sarif to upload SARIF results
       actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
     name: "[Monitoring] Scan Datahub Ingestion Slim images for vulnerabilities"
-    runs-on: ubuntu-latest
-    needs: [setup, datahub_ingestion_slim_build]
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, smoke_test_lint, datahub_ingestion_slim_build]
     if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }}
     steps:
       - name: Checkout # adding checkout step just to make trivy upload happy
@@ -911,18 +955,13 @@ jobs:

   datahub_ingestion_full_build:
     name: Build and Push DataHub Ingestion (Full) Docker Images
-    runs-on: ubuntu-latest
+    runs-on: depot-ubuntu-24.04
     outputs:
       tag: ${{ steps.tag.outputs.tag }}
       needs_artifact_download: ${{ needs.setup.outputs.ingestion_change == 'true' && ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) }}
-    needs: [setup, datahub_ingestion_base_full_build]
+    needs: [setup, smoke_test_lint, datahub_ingestion_base_full_build]
     if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
     steps:
-      - name: Free up disk space
-        run: |
-          sudo apt-get remove 'dotnet-*' azure-cli || true
-          sudo rm -rf /usr/local/lib/android/ || true
-          sudo docker image prune -a -f || true
       - name: Check out the repo
         uses: acryldata/sane-checkout-action@v3
       - uses: actions/setup-python@v5
@@ -934,7 +973,7 @@ jobs:
         with:
           distribution: "zulu"
           java-version: 17
-      - uses: gradle/actions/setup-gradle@v4
+      #- uses: gradle/actions/setup-gradle@v4
       - name: Build codegen
         if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
         run: ./gradlew :metadata-ingestion:codegen
@@ -978,8 +1017,8 @@ jobs:
       security-events: write # for github/codeql-action/upload-sarif to upload SARIF results
       actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
     name: "[Monitoring] Scan Datahub Ingestion images for vulnerabilities"
-    runs-on: ubuntu-latest
-    needs: [setup, datahub_ingestion_full_build]
+    runs-on: depot-ubuntu-24.04
+    needs: [setup, smoke_test_lint, datahub_ingestion_full_build]
     if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }}
     steps:
       - name: Checkout # adding checkout step just to make trivy upload happy
@@ -1010,7 +1049,7 @@ jobs:
           sarif_file: "trivy-results.sarif"

   smoke_test_matrix:
-    runs-on: ubuntu-latest
+    runs-on: depot-ubuntu-24.04-small
     needs: setup
     outputs:
       matrix: ${{ steps.set-matrix.outputs.matrix }}
@@ -1023,8 +1062,8 @@ jobs:
         # python_batch_count is used to split pytests in the smoke-test (batches of actual test functions)
         # cypress_batch_count is used to split the collection of cypress test specs into batches.
         run: |
-          echo "cypress_batch_count=5" >> "$GITHUB_OUTPUT"
-          echo "python_batch_count=3" >> "$GITHUB_OUTPUT"
+          echo "cypress_batch_count=11" >> "$GITHUB_OUTPUT"
+          echo "python_batch_count=6" >> "$GITHUB_OUTPUT"

       - id: set-matrix
         # For m batches for python and n batches for cypress, we need a test matrix of python x m + cypress x n.
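Raising the counts from 5 cypress and 3 python batches to 11 and 6 grows the test matrix from 8 to 17 parallel entries, trading runner count for shorter per-batch wall-clock time. The set-matrix step itself sits outside this hunk; a sketch of how such a matrix could be assembled with jq (the field names here are assumptions, not the repository's actual ones):

      - id: set-matrix
        run: |
          # 11 cypress batches + 6 python batches => 17 matrix entries
          matrix=$(jq -n -c '[range(11) | {strategy: "cypress", batch: .}]
                           + [range(6)  | {strategy: "python",  batch: .}]')
          echo "matrix=$matrix" >> "$GITHUB_OUTPUT"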
@@ -1050,20 +1089,13 @@ jobs:

   smoke_test:
     name: Run Smoke Tests
-    runs-on: ubuntu-latest
+    runs-on: depot-ubuntu-24.04-4
     needs:
       [
         setup,
         smoke_test_matrix,
-        gms_build,
-        frontend_build,
-        kafka_setup_build,
-        mysql_setup_build,
-        elasticsearch_setup_build,
-        mae_consumer_build,
-        mce_consumer_build,
-        datahub_upgrade_build,
-        datahub_ingestion_slim_build,
+        base_build,
+        #datahub_ingestion_slim_build,
       ]
     strategy:
       fail-fast: false
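The fan-in collapses because image assembly now happens inside the smoke test itself (see the build images step in the next hunk) rather than in nine upstream build jobs. Condensed view of the change:

    needs:
      [
        setup,
        smoke_test_matrix,
        base_build,                     # the one shared context producer...
        #datahub_ingestion_slim_build,  # ...replaces the per-image build jobs
      ]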
@@ -1071,129 +1103,69 @@ jobs:
     if: ${{ always() && !failure() && !cancelled() && needs.smoke_test_matrix.outputs.matrix != '[]' }}
     steps:
       - name: Free up disk space
+        if: false # dont need this on depot
         run: |
           sudo apt-get remove 'dotnet-*' azure-cli || true
           sudo rm -rf /usr/local/lib/android/ || true
           sudo docker image prune -a -f || true
-      - name: Disk Check
-        run: df -h . && docker images
+      - uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cache/uv
+          key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements.txt') }}
+
+      - uses: actions/cache@v4
+        with:
+          path: |
+            ~/.npm
+            ~/.cache/Cypress
+            ~/.cache/yarn
+          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
+
       - name: Check out the repo
         uses: acryldata/sane-checkout-action@v3
+
+      - name: Set up Depot CLI
+        if: ${{ env.DOCKER_CACHE == 'DEPOT' }}
+        uses: depot/setup-action@v1
+
+      - name: configure-docker
+        if: ${{ env.DOCKER_CACHE == 'DEPOT' }}
+        run: |
+          depot configure-docker
+
+      - uses: actions/cache/restore@v4
+        with:
+          path: ${{ github.workspace }}/build/dockerBuildContext/
+          key: ${{ runner.os }}-docker-${{ github.sha }}
+
       - uses: actions/setup-python@v5
         with:
           python-version: "3.10"
           cache: "pip"
-      - name: Set up JDK 17
-        uses: actions/setup-java@v4
-        with:
-          distribution: "zulu"
-          java-version: 17
-      - uses: gradle/actions/setup-gradle@v4
-      - name: Login to DockerHub
-        uses: docker/login-action@v3
-        if: ${{ needs.setup.outputs.docker-login == 'true' }}
-        with:
-          username: ${{ secrets.ACRYL_DOCKER_USERNAME }}
-          password: ${{ secrets.ACRYL_DOCKER_PASSWORD }}
-      - name: Disk Check
-        run: df -h . && docker images
-      - name: Remove images
-        run: docker image prune -a -f || true
-      - name: Disk Check
-        run: df -h . && docker images
-      - name: Download GMS image
-        uses: ishworkh/container-image-artifact-download@v2.0.0
-        if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.gms_build.result == 'success' }}
-        with:
-          image: ${{ env.DATAHUB_GMS_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
-      - name: Download Frontend image
-        uses: ishworkh/container-image-artifact-download@v2.0.0
-        if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.frontend_build.result == 'success' }}
-        with:
-          image: ${{ env.DATAHUB_FRONTEND_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
-      - name: Download Kafka Setup image
-        uses: ishworkh/container-image-artifact-download@v2.0.0
-        if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.kafka_setup_build.result == 'success' }}
-        with:
-          image: ${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
-      - name: Download Mysql Setup image
-        uses: ishworkh/container-image-artifact-download@v2.0.0
-        if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.mysql_setup_build.result == 'success' }}
-        with:
-          image: ${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
-      - name: Download Elastic Setup image
-        uses: ishworkh/container-image-artifact-download@v2.0.0
-        if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.elasticsearch_setup_build.result == 'success' }}
-        with:
-          image: ${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
-      - name: Download MCE Consumer image
-        uses: ishworkh/container-image-artifact-download@v2.0.0
-        if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.mce_consumer_build.result == 'success' }}
-        with:
-          image: ${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
-      - name: Download MAE Consumer image
-        uses: ishworkh/container-image-artifact-download@v2.0.0
-        if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.mae_consumer_build.result == 'success' }}
-        with:
-          image: ${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
-      - name: Download upgrade image
-        uses: ishworkh/container-image-artifact-download@v2.0.0
-        if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.datahub_upgrade_build.result == 'success' }}
-        with:
-          image: ${{ env.DATAHUB_UPGRADE_IMAGE }}:${{ needs.setup.outputs.unique_tag }}
-      - name: Download datahub-ingestion-slim image
-        uses: ishworkh/container-image-artifact-download@v2.0.0
-        if: ${{ needs.datahub_ingestion_slim_build.outputs.needs_artifact_download == 'true' && needs.datahub_ingestion_slim_build.result == 'success' }}
-        with:
-          image: ${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.datahub_ingestion_slim_build.outputs.tag }}
-      - name: Disk Check
-        run: df -h . && docker images
-      - name: CI Optimization Head Images
-        # When publishing all tests/images are built (no optimizations)
-        if: ${{ needs.setup.outputs.publish != 'true' }}
+      #- uses: gradle/actions/setup-gradle@v4
+
+      # - name: Login to DockerHub
+      #   uses: docker/login-action@v3
+      #   if: ${{ needs.setup.outputs.docker-login == 'true' }}
+      #   with:
+      #     username: ${{ secrets.ACRYL_DOCKER_USERNAME }}
+      #     password: ${{ secrets.ACRYL_DOCKER_PASSWORD }}
+
+      - name: build images
         run: |
-          if [ '${{ needs.setup.outputs.backend_change }}' == 'false' ]; then
-            echo 'GMS/Upgrade/MCE/MAE head images'
-            docker pull '${{ env.DATAHUB_GMS_IMAGE }}:head'
-            docker pull '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:head'
-            docker pull '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:head'
-            docker pull '${{ env.DATAHUB_UPGRADE_IMAGE }}:head'
-            docker tag '${{ env.DATAHUB_GMS_IMAGE }}:head' '${{ env.DATAHUB_GMS_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
-            docker tag '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:head' '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
-            docker tag '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:head' '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
-            docker tag '${{ env.DATAHUB_UPGRADE_IMAGE }}:head' '${{ env.DATAHUB_UPGRADE_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
-          fi
-          if [ '${{ needs.setup.outputs.frontend_change }}' == 'false' ]; then
-            echo 'Frontend head images'
-            docker pull '${{ env.DATAHUB_FRONTEND_IMAGE }}:head'
-            docker tag '${{ env.DATAHUB_FRONTEND_IMAGE }}:head' '${{ env.DATAHUB_FRONTEND_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
-          fi
-          if [ '${{ needs.setup.outputs.kafka_setup_change }}' == 'false' ]; then
-            echo 'kafka-setup head images'
-            docker pull '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:head'
-            docker tag '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:head' '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
-          fi
-          if [ '${{ needs.setup.outputs.mysql_setup_change }}' == 'false' ]; then
-            echo 'mysql-setup head images'
-            docker pull '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:head'
-            docker tag '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:head' '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
-          fi
-          if [ '${{ needs.setup.outputs.elasticsearch_setup_change }}' == 'false' ]; then
-            echo 'elasticsearch-setup head images'
-            docker pull '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:head'
-            docker tag '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:head' '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
-          fi
-      - name: CI Slim Head Images
-        run: |
-          if [ '${{ needs.setup.outputs.ingestion_change }}' == 'false' ]; then
-            echo 'datahub-ingestion head-slim images'
-            docker pull '${{ env.DATAHUB_INGESTION_IMAGE }}:head-slim'
-            if [ '${{ needs.datahub_ingestion_slim_build.outputs.tag || 'head-slim' }}' != 'head-slim' ]; then
-              docker tag '${{ env.DATAHUB_INGESTION_IMAGE }}:head-slim' '${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.setup.outputs.unique_tag }}'
-            fi
-          fi
-      - name: Disk Check
-        run: df -h . && docker images
+          ./gradlew :docker:buildImagesFromCacheQuickstartDebugConsumers -PreleaseVersion=${{ needs.setup.outputs.unique_tag }} &
+          docker pull confluentinc/cp-kafka:7.4.0 &
+          docker pull mysql:8.2 &
+          docker pull opensearchproject/opensearch:2.9.0 &
+          docker pull ${{ env.DATAHUB_INGESTION_IMAGE }}:head &
+
+          wait
+          docker images
       - name: run quickstart
         env:
           DATAHUB_TELEMETRY_ENABLED: false
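The new build images step overlaps the Gradle image build with the registry pulls by backgrounding each command with & and joining on wait, so the step takes roughly the time of the slowest item instead of the sum. A standalone sketch of the pattern:

      - name: build images (parallel sketch)
        run: |
          ./gradlew :docker:buildImagesFromCacheQuickstartDebugConsumers &  # long build
          docker pull mysql:8.2 &                                           # overlapped pulls
          docker pull confluentinc/cp-kafka:7.4.0 &
          wait           # blocks until every background job has exited
          docker images  # sanity check of what is now available locally

One caveat worth knowing: a bare `wait` exits 0 even if a background command failed, so a failed pull or build would not fail the step unless checked explicitly.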
@@ -1204,8 +1176,10 @@ jobs:
           ACTIONS_CONFIG: "https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml"
         run: |
           ./smoke-test/run-quickstart.sh
+
       - name: Disk Check
         run: df -h . && docker images
+
       - name: Disable ES Disk Threshold
         run: |
           curl -XPUT "http://localhost:9200/_cluster/settings" \
@ -1218,19 +1192,15 @@ jobs:
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}'
|
}'
|
||||||
-      - name: Disk Check
-        run: df -h . && docker images
      - name: Install dependencies
        run: ./metadata-ingestion/scripts/install_deps.sh
      - name: Build datahub cli
        run: |
          ./gradlew :metadata-ingestion:install
-      - name: Disk Check
-        run: df -h . && docker images
-      - name: Remove Source Code
-        run: find ./*/* ! -path "./metadata-ingestion*" ! -path "./smoke-test*" ! -path "./gradle*" -delete
-      - name: Disk Check
-        run: df -h . && docker images
      - name: Smoke test
        env:
          RUN_QUICKSTART: false
@ -1244,8 +1214,10 @@ jobs:
          echo "$DATAHUB_VERSION"
          ./gradlew --stop
          ./smoke-test/smoke.sh

      - name: Disk Check
        run: df -h . && docker images
      - name: store logs
        if: failure()
        run: |
@ -1279,10 +1251,11 @@ jobs:
        uses: codecov/test-results-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}

  deploy_datahub_head:
    name: Deploy to Datahub HEAD
    runs-on: ubuntu-latest
-    needs: [setup, smoke_test]
+    needs: [setup, smoke_test_lint, smoke_test]
    steps:
      - uses: aws-actions/configure-aws-credentials@v4
        if: ${{ needs.setup.outputs.publish != 'false' && github.repository_owner == 'datahub-project' && needs.setup.outputs.repository_name == 'datahub' }}
@ -76,7 +76,7 @@ task unversionZip(type: Copy, dependsOn: [':datahub-web-react:distZip', distZip]

docker {
  dependsOn(stageMainDist)
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
  dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
  files "${buildDir}/stage"
  files fileTree(rootProject.projectDir) {
@ -172,7 +172,7 @@ task runNoCode(type: Exec) {

docker {
  dependsOn(bootJar)
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
  dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile")
  files bootJar.outputs.files
  files fileTree(rootProject.projectDir) {
@ -257,6 +257,7 @@ quickstart_configs.each { taskName, config ->
  // Only restart containers that had their modules rebuilt
  if (containersToRestart) {
    def cmd = ["docker compose -p datahub --profile ${config.profile}"] + ['-f', compose_base] + ['restart'] + containersToRestart
+    println(cmd.join(" "))
    commandLine 'bash', '-c', cmd.join(" ")
  } else {
    // If no containers need restart, make this a no-op
@ -17,8 +17,7 @@ ext {

docker {
  dependsOn build
-  name "${docker_registry}/${docker_repo}:v${docker_version}"
-  //version "v${docker_version}"
+  name "${docker_registry}/${docker_repo}:${docker_version}"
  dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
  files fileTree(rootProject.projectDir) {
    include '.dockerignore'
@ -22,7 +22,7 @@ dependencies {

docker {
  dependsOn 'build', ':docker:datahub-ingestion-base:docker', ':metadata-ingestion:codegen'
-  name "${docker_registry}/${docker_repo}:v${docker_version}"
+  name "${docker_registry}/${docker_repo}:${docker_version}"
  dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile${docker_target == "slim" ? "-slim-only" : ""}")
  files fileTree(rootProject.projectDir) {
    include '.dockerignore'
@ -12,7 +12,7 @@ ext {

docker {
  dependsOn(build)
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
  dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
  files fileTree(rootProject.projectDir) {
    include '.dockerignore'
@ -12,7 +12,7 @@ ext {

docker {
  dependsOn(build)
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
  dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
  files fileTree(rootProject.projectDir) {
    include '.dockerignore'
@ -13,7 +13,7 @@ ext {

docker {
  dependsOn build
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
  dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
  files fileTree(rootProject.projectDir) {
    include '.dockerignore'
@ -12,7 +12,7 @@ ext {
}

docker {
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
  dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile")
  files fileTree(rootProject.projectDir) {
    include '.dockerignore'
@ -23,10 +23,14 @@ def detailedVersionString = "0.0.0-unknown-SNAPSHOT"
def cliMajorVersion = "0.15.0" // base default cli major version
def snapshotVersion = false
def javaVersion = ""
+// Tag for docker images. The "v" prefix is used in the tag only if the tag is computed from a version; if a releaseVersion is supplied, it is used as is.
+// This enables PR tags to be used without the "v" prefix. This variance was previously handled in the CI steps that build images without using gradle.
+def versionTag = ""

if (project.hasProperty("releaseVersion")) {
  version = releaseVersion
  detailedVersionString = releaseVersion
+  versionTag = releaseVersion
} else {
  try {
    // apply this plugin in a try-catch block so that we can handle cases without .git directory
@ -35,6 +39,7 @@ if (project.hasProperty("releaseVersion")) {
    detailedVersionString = gitVersion()
    version = details.lastTag
    version = version.startsWith("v") ? version.substring(1) : version
+    versionTag = "v" + version
    def suffix = details.isCleanTag ? "" : "-SNAPSHOT"
    snapshotVersion = !details.isCleanTag
  }
@ -78,6 +83,7 @@ if (snapshotVersion) {
    // we are unable to parse the last token as an integer, so we just append SNAPSHOT to this version
    javaVersion = versionParts[0..versionParts.size()-1].join('.') + '-SNAPSHOT'
  }
+  versionTag = "v" + version
}

// Note: No task, we want this executed during config phase, once for rootProject.
@ -85,7 +91,8 @@ def data = [
  fullVersion: detailedVersionString,
  cliMajorVersion: cliMajorVersion,
  version: version,
-  javaVersion: javaVersion
+  javaVersion: javaVersion,
+  versionTag: versionTag
]

// Convert to JSON
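A minimal Python sketch of the tagging rule these Gradle changes implement (illustrative only; compute_version_tag is a hypothetical name, and the two inputs mirror releaseVersion and details.lastTag above):

# Illustrative only (hypothetical helper): mirrors the Gradle versionTag rule.
def compute_version_tag(release_version=None, last_git_tag=None):
    if release_version is not None:
        return release_version          # a supplied releaseVersion (e.g. a PR tag) passes through un-prefixed
    version = last_git_tag or "0.0.0-unknown-SNAPSHOT"
    version = version[1:] if version.startswith("v") else version
    return "v" + version                # git-derived versions get the "v" prefix back

assert compute_version_tag(release_version="pr13057") == "pr13057"
assert compute_version_tag(last_git_tag="v0.15.0") == "v0.15.0"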
@ -4,9 +4,12 @@ import org.apache.tools.ant.filters.ReplaceTokens

def detailedVersionString = "0.0.0-unknown-SNAPSHOT"
def cliMajorVersion = "0.15.0" // base default cli major version

def inputFile = file("${rootProject.buildDir}/version.json")

+ext {
+  versionTag = "v${detailedVersionString}"
+}

task readJsonData {
  if (inputFile.exists()) {
    def jsonSlurper = new JsonSlurper()
@ -15,6 +18,7 @@ task readJsonData {
    detailedVersionString = data.fullVersion
    cliMajorVersion = data.cliMajorVersion
    version = data.version
+    versionTag = data.versionTag
  } else {
    println "git.properties JSON file not found: ${inputFile.path}"
  }
@ -47,7 +47,7 @@ bootJar {

docker {
  dependsOn(bootJar)
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
  //version "v${version}"
  dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile")
  files bootJar.outputs.files
@ -58,7 +58,7 @@ bootJar {

docker {
  dependsOn(bootJar)
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
  //version "v${version}"
  dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile")
  files bootJar.outputs.files
@ -86,7 +86,7 @@ bootRun {

docker {
  dependsOn bootJar
-  name "${docker_registry}/${docker_repo}:v${version}"
+  name "${docker_registry}/${docker_repo}:${versionTag}"
  dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile")
  files bootJar.outputs.files
  files fileTree(rootProject.projectDir) {
@ -123,4 +123,4 @@ test {
    "com.linkedin.gms.ServletConfig",
    "com.linkedin.gms.GMSApplication"]
  }
}
@ -49,6 +49,43 @@ def pytest_sessionfinish(session, exitstatus):
    send_message(exitstatus)


+def bin_pack_tasks(tasks, n_buckets):
+    """
+    Bin-pack tasks into n_buckets with roughly equal weights.
+
+    Parameters:
+        tasks (list): List of (task, weight) tuples. If only task is provided, weight defaults to 1.
+        n_buckets (int): Number of buckets to distribute tasks into.
+
+    Returns:
+        list: List of buckets, where each bucket is a list of tasks.
+    """
+    # Normalize the tasks to ensure they're all (task, weight) tuples
+    normalized_tasks = []
+    for task in tasks:
+        if isinstance(task, tuple) and len(task) == 2:
+            normalized_tasks.append(task)
+        else:
+            normalized_tasks.append((task, 1))
+
+    # Sort tasks by weight in descending order
+    sorted_tasks = sorted(normalized_tasks, key=lambda x: x[1], reverse=True)
+
+    # Initialize the buckets with zero weight
+    buckets: List = [[] for _ in range(n_buckets)]
+    bucket_weights: List[int] = [0] * n_buckets
+
+    # Assign each task to the bucket with the lowest current weight
+    for task, weight in sorted_tasks:
+        # Find the bucket with the minimum weight
+        min_bucket_idx = bucket_weights.index(min(bucket_weights))
+
+        # Add the task to this bucket
+        buckets[min_bucket_idx].append(task)
+        bucket_weights[min_bucket_idx] += weight
+
+    return buckets
+
+
def get_batch_start_end(num_tests: int) -> Tuple[int, int]:
    batch_count_env = os.getenv("BATCH_COUNT", 1)
    batch_count = int(batch_count_env)
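The function above is a greedy longest-processing-time heuristic: the heaviest specs are placed first, each into the currently lightest bucket. A minimal usage sketch (illustrative only, not part of the diff; it assumes bin_pack_tasks is importable from the smoke-test conftest as added above):

# Illustrative only: distribute weighted Cypress specs across two workers.
from conftest import bin_pack_tasks  # added in the hunk above

tasks = [("slow.js", 90.0), ("mid.js", 40.0), ("fast.js", 10.0), "unweighted.js"]
buckets = bin_pack_tasks(tasks, n_buckets=2)
# Greedy pass, heaviest first: slow.js -> bucket 0 (weight 90),
# mid.js -> bucket 1 (40), fast.js -> bucket 1 (50), unweighted.js -> bucket 1 (51).
assert buckets == [["slow.js"], ["mid.js", "fast.js", "unweighted.js"]]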
@ -71,8 +108,8 @@ def get_batch_start_end(num_tests: int) -> Tuple[int, int]:
    batch_end = batch_start + batch_size
    # We must have exactly as many batches as specified by BATCH_COUNT.
    if (
-        num_tests - batch_end < batch_size
-    ):  # We must have exactly as many batches as specified by BATCH_COUNT, put the remaining in the last batch.
+        batch_number == batch_count - 1  # this is the last batch
+    ):  # If this is the last batch, put any remaining tests in it.
        batch_end = num_tests

    if batch_count > 0:
@ -80,7 +117,6 @@ def get_batch_start_end(num_tests: int) -> Tuple[int, int]:

    return batch_start, batch_end


def pytest_collection_modifyitems(
    session: pytest.Session, config: pytest.Config, items: List[Item]
) -> None:
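For clarity on the last-batch rule above, a small standalone sketch (illustrative only; batch_bounds is a hypothetical stand-in for get_batch_start_end):

# Illustrative only: the last batch absorbs the remainder, so BATCH_COUNT
# workers always cover all tests. With 10 tests and 3 workers,
# batch_size = 10 // 3 = 3 and the final worker takes 4.
def batch_bounds(num_tests: int, batch_count: int, batch_number: int):
    batch_size = num_tests // batch_count
    batch_start = batch_size * batch_number
    batch_end = batch_start + batch_size
    if batch_number == batch_count - 1:  # the rule introduced above
        batch_end = num_tests
    return batch_start, batch_end

assert [batch_bounds(10, 3, i) for i in range(3)] == [(0, 3), (3, 6), (6, 10)]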
@ -36,11 +36,13 @@ source ./set-cypress-creds.sh
# set environment variables for the test
source ./set-test-env-vars.sh

+echo "TEST_STRATEGY: $TEST_STRATEGY, BATCH_COUNT: $BATCH_COUNT, BATCH_NUMBER: $BATCH_NUMBER"
+
# TEST_STRATEGY:
#   if set to pytests, runs all pytests, skips cypress tests (though the cypress test launch is via a pytest).
#   if set to cypress, runs all cypress tests
#   if blank, runs all.
-# When invoked via the github action, BATCH_COUNT and BATCH_NUM env vars are set to run a slice of those tests per
+# When invoked via the github action, BATCH_COUNT and BATCH_NUMBER env vars are set to run a slice of those tests per
# worker for parallelism. docker-unified.yml generates a test matrix of pytests/cypress in batches. As the number of
# tests increases, the batch_count config (in docker-unified.yml) may need adjustment.
if [[ "${TEST_STRATEGY}" == "pytests" ]]; then
@ -51,7 +53,7 @@ elif [[ "${TEST_STRATEGY}" == "cypress" ]]; then
  # github workflow test matrix will invoke this in multiple runners for each batch.
  # Skipping the junit at the pytest level since cypress itself generates junits on a per-test basis. The pytest is a
  # single test for all cypress tests and isn't very helpful.
-  pytest -rP --durations=20 -vv --continue-on-collection-errors tests/cypress/integration_test.py
+  pytest -rP --durations=20 -vvs --continue-on-collection-errors tests/cypress/integration_test.py
else
-  pytest -rP --durations=20 -vv --continue-on-collection-errors --junit-xml=junit.smoke-all.xml
+  pytest -rP --durations=20 -vvs --continue-on-collection-errors --junit-xml=junit.smoke-all.xml
fi
@ -1,11 +1,13 @@
import datetime
+import json
import os
import subprocess
+import threading
from typing import List

import pytest

-from conftest import get_batch_start_end
+from conftest import bin_pack_tasks
from tests.setup.lineage.ingest_time_lineage import (
    get_time_lineage_urns,
    ingest_time_lineage,
@ -196,10 +198,25 @@ def _get_cypress_tests_batch():
    """
    all_tests = _get_js_files("tests/cypress/cypress/e2e")

-    batch_start, batch_end = get_batch_start_end(num_tests=len(all_tests))
-    return all_tests[batch_start:batch_end]
-    # return test_batches[int(batch_number)]  # if BATCH_NUMBER was set, this test just runs that one batch.
+    tests_with_weights = []
+
+    with open("tests/cypress/test_weights.json") as f:
+        weights_data = json.load(f)
+
+    # The file maps file paths (relative to the cypress/e2e folder) to durations in seconds
+    # (with an "s" suffix), pulled from the codecov report.
+    # TODO: use some other method to automate finding the weights, maybe the junits directly.
+    test_weights = {
+        item["filePath"]: float(item["duration"][:-1]) for item in weights_data
+    }
+
+    for test in all_tests:
+        if test in test_weights:
+            tests_with_weights.append((test, test_weights[test]))
+        else:
+            tests_with_weights.append(test)
+
+    test_batches = bin_pack_tasks(tests_with_weights, int(os.getenv("BATCH_COUNT", 1)))
+    return test_batches[int(os.getenv("BATCH_NUMBER", 0))]


def test_run_cypress(auth_session):
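For reference, a small sketch of how the weights file above is parsed (illustrative only; the sample entry is taken from the test_weights.json introduced later in this diff):

# Illustrative only: durations like "94.892s" become float weights.
import json

sample = '[{"filePath": "glossaryV2/v2_glossary_navigation.js", "duration": "94.892s"}]'
weights = {e["filePath"]: float(e["duration"][:-1]) for e in json.loads(sample)}
assert weights == {"glossaryV2/v2_glossary_navigation.js": 94.892}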
@ -225,7 +242,8 @@ def test_run_cypress(auth_session):
    test_spec_arg = f" --spec '{specs_str}' "

    print("Running Cypress tests with command")
-    command = f"NO_COLOR=1 npx cypress run {record_arg} {test_spec_arg} {tag_arg}"
+    node_options = "--max-old-space-size=6000"
+    command = f'NO_COLOR=1 NODE_OPTIONS="{node_options}" npx cypress run {record_arg} {test_spec_arg} {tag_arg} --config numTestsKeptInMemory=2'
    print(command)
    # Add --headed --spec '**/mutations/mutations.js' (change spec name)
    # in case you want to see the browser for debugging
@ -236,15 +254,39 @@ def test_run_cypress(auth_session):
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        cwd=f"{CYPRESS_TEST_DATA_DIR}",
+        text=True,  # Use text mode for string output
+        bufsize=1,  # Line buffered
    )
    assert proc.stdout is not None
    assert proc.stderr is not None
-    stdout = proc.stdout.read()
-    stderr = proc.stderr.read()
+
+    # Function to read and print output from a pipe
+    def read_and_print(pipe, prefix=""):
+        for line in pipe:
+            print(f"{prefix}{line}", end="")
+
+    # Read and print output in real-time
+    stdout_thread = threading.Thread(target=read_and_print, args=(proc.stdout,))
+    stderr_thread = threading.Thread(
+        target=read_and_print, args=(proc.stderr, "stderr: ")
+    )
+
+    # Set threads as daemon so they exit when the main thread exits
+    stdout_thread.daemon = True
+    stderr_thread.daemon = True
+
+    # Start the threads
+    stdout_thread.start()
+    stderr_thread.start()
+
+    # Wait for the process to complete
    return_code = proc.wait()
-    print(stdout.decode("utf-8"))
-    print("stderr output:")
-    print(stderr.decode("utf-8"))
+
+    # Wait for the threads to finish
+    stdout_thread.join()
+    stderr_thread.join()
+
    print("return code", return_code)
    print_now()
    assert return_code == 0
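The change above switches from buffering the whole Cypress output to streaming it line by line while the process runs. A minimal standalone sketch of the same pattern (illustrative only; the echo command is a stand-in for the cypress invocation):

# Illustrative only: stream a child's output as it is produced instead of
# buffering everything and printing at the end.
import subprocess
import threading

proc = subprocess.Popen(
    ["echo", "hello"],
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    text=True,   # pipes yield str lines
    bufsize=1,   # line buffered
)

def pump(pipe, prefix=""):
    for line in pipe:  # iterating a text pipe yields lines as they arrive
        print(f"{prefix}{line}", end="")

threads = [
    threading.Thread(target=pump, args=(proc.stdout,), daemon=True),
    threading.Thread(target=pump, args=(proc.stderr, "stderr: "), daemon=True),
]
for t in threads:
    t.start()
return_code = proc.wait()   # wait for the child first...
for t in threads:
    t.join()                # ...then drain the reader threads
assert return_code == 0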
smoke-test/tests/cypress/test_weights.json (new file, 310 lines)
@ -0,0 +1,310 @@
[
  { "filePath": "glossaryV2/v2_glossary_navigation.js", "duration": "94.892s" },
  { "filePath": "mutations/dataset_ownership.js", "duration": "83.546s" },
  { "filePath": "glossary/glossary_navigation.js", "duration": "74.698s" },
  { "filePath": "mutationsV2/v2_managing_secrets.js", "duration": "74.043s" },
  { "filePath": "mutations/managing_secrets.js", "duration": "63.959s" },
  { "filePath": "glossaryV2/v2_glossary.js", "duration": "55.863s" },
  { "filePath": "settingsV2/v2_manage_policies.js", "duration": "48.185s" },
  { "filePath": "lineageV2/v2_download_lineage_results.js", "duration": "44.230s" },
  { "filePath": "lineage/download_lineage_results.js", "duration": "43.302s" },
  { "filePath": "glossary/glossary.js", "duration": "37.810s" },
  { "filePath": "viewV2/v2_view_select.js", "duration": "37.262s" },
  { "filePath": "lineageV2/v2_lineage_column_path.js", "duration": "34.825s" },
  { "filePath": "mutationsV2/v2_ingestion_source.js", "duration": "33.981s" },
  { "filePath": "domainsV2/v2_nested_domains.js", "duration": "33.386s" },
  { "filePath": "settingsV2/v2_homePagePost.js", "duration": "33.135s" },
  { "filePath": "mutations/ingestion_source.js", "duration": "32.170s" },
  { "filePath": "views/view_select.js", "duration": "29.705s" },
  { "filePath": "lineage/lineage_column_path.js", "duration": "28.825s" },
  { "filePath": "mutationsV2/v2_managed_ingestion.js", "duration": "28.049s" },
  { "filePath": "glossaryV2/v2_glossaryTerm.js", "duration": "26.395s" },
  { "filePath": "ownershipV2/v2_manage_ownership.js", "duration": "20.270s" },
  { "filePath": "ownership/manage_ownership.js", "duration": "19.061s" },
  { "filePath": "schema_blame/schema_blame.js", "duration": "18.482s" },
  { "filePath": "mutations/manage_ingestion_secret_privilege.js", "duration": "17.412s" },
  { "filePath": "mutations/mutations.js", "duration": "16.284s" },
  { "filePath": "settingsV2/v2_manage_access_tokens.js", "duration": "15.832s" },
  { "filePath": "settings/manage_access_tokens.js", "duration": "15.335s" },
  { "filePath": "mutations/deprecations.js", "duration": "15.296s" },
  { "filePath": "domains/nested_domains.js", "duration": "14.909s" },
  { "filePath": "query/query_tab.js", "duration": "14.025s" },
  { "filePath": "lineageV2/v2_lineage_column_level.js", "duration": "13.875s" },
  { "filePath": "mutations/add_users.js", "duration": "13.665s" },
  { "filePath": "search/query_and_filter_search.js", "duration": "13.633s" },
  { "filePath": "mutations/edit_documentation.js", "duration": "13.068s" },
  { "filePath": "settings/homePagePost.js", "duration": "12.013s" },
  { "filePath": "settings/manage_policies.js", "duration": "11.966s" },
  { "filePath": "mutations/domains.js", "duration": "11.640s" },
  { "filePath": "analytics/analytics.js", "duration": "11.405s" },
  { "filePath": "settingsV2/v2_managing_groups.js", "duration": "10.545s" },
  { "filePath": "settings/managing_groups.js", "duration": "10.482s" },
  { "filePath": "lineage/lineage_column_level.js", "duration": "10.041s" },
  { "filePath": "viewV2/v2_manage_views.js", "duration": "10.010s" },
  { "filePath": "ml/experiment.js", "duration": "9.645s" },
  { "filePath": "auto_completeV2/v2_auto_complete.js", "duration": "9.641s" },
  { "filePath": "incidentsV2/v2_incidents.js", "duration": "9.186s" },
  { "filePath": "siblingsV2/v2_siblings.js", "duration": "8.842s" },
  { "filePath": "views/manage_views.js", "duration": "8.820s" },
  { "filePath": "glossary/glossaryTerm.js", "duration": "8.789s" },
  { "filePath": "lineageV2/v2_impact_analysis.js", "duration": "8.482s" },
  { "filePath": "actions/docPropagation.js", "duration": "8.416s" },
  { "filePath": "ml/model_mlflow.js", "duration": "8.384s" },
  { "filePath": "mutations/dataset_health.js", "duration": "8.144s" },
  { "filePath": "search/search.js", "duration": "8.060s" },
  { "filePath": "loginV2/v2_login.js", "duration": "7.953s" },
  { "filePath": "login/login.js", "duration": "7.878s" },
  { "filePath": "auto_complete/auto_complete.js", "duration": "7.562s" },
  { "filePath": "task_runV2/v2_task_runs.js", "duration": "7.416s" },
  { "filePath": "schema_blameV2/v2_schema_blame.js", "duration": "7.348s" },
  { "filePath": "ml/model_sagemaker.js", "duration": "7.162s" },
  { "filePath": "lineage/impact_analysis.js", "duration": "7.024s" },
  { "filePath": "containersV2/v2_containers.js", "duration": "6.872s" },
  { "filePath": "task_runs/task_runs.js", "duration": "6.423s" },
  { "filePath": "ml/feature_table.js", "duration": "6.417s" },
  { "filePath": "containers/containers.js", "duration": "6.158s" },
  { "filePath": "domainsV2/v2_domains.js", "duration": "6.074s" },
  { "filePath": "lineageV2/v2_lineage_graph.js", "duration": "5.932s" },
  { "filePath": "home/home.js", "duration": "5.735s" },
  { "filePath": "siblings/siblings.js", "duration": "5.621s" },
  { "filePath": "operrationsV2/v2_operations.js", "duration": "5.504s" },
  { "filePath": "operations/operations.js", "duration": "5.264s" },
  { "filePath": "search/searchFilters.js", "duration": "5.178s" },
  { "filePath": "domains/domains.js", "duration": "5.170s" },
  { "filePath": "homeV2/v2_home.js", "duration": "5.135s" },
  { "filePath": "browse/browseV2.js", "duration": "4.988s" },
  { "filePath": "lineage/lineage_graph.js", "duration": "3.938s" },
  { "filePath": "businessAttribute/attribute_mutations.js", "duration": "3.126s" },
  { "filePath": "businessAttribute/businessAttribute.js", "duration": "2.232s" }
]
@ -5,6 +5,7 @@ from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Tuple

import requests
+import tenacity
from joblib import Parallel, delayed
from requests.structures import CaseInsensitiveDict

@ -277,6 +278,11 @@ class TestSessionWrapper:
        print("TestSessionWrapper sync wait.")
        wait_for_writes_to_sync()

+    @tenacity.retry(
+        stop=tenacity.stop_after_attempt(10),
+        wait=tenacity.wait_exponential(multiplier=1, min=4, max=30),
+        retry=tenacity.retry_if_exception_type(Exception),
+    )
    def _generate_gms_token(self):
        actor_urn = self._upstream.cookies["actor"]
        json = {
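For context on the retry policy above: tenacity re-invokes the decorated method on any exception, up to 10 attempts, with exponential backoff clamped between 4 and 30 seconds. A minimal sketch of the decorator's behavior (illustrative only; wait_fixed(0) replaces the exponential wait so the demo runs instantly, and flaky is a hypothetical stand-in for _generate_gms_token):

# Illustrative only: tenacity retries the call until it stops raising.
import tenacity

attempts = {"n": 0}

@tenacity.retry(
    stop=tenacity.stop_after_attempt(10),
    wait=tenacity.wait_fixed(0),  # the real code backs off exponentially, 4s..30s
    retry=tenacity.retry_if_exception_type(Exception),
)
def flaky():
    attempts["n"] += 1
    if attempts["n"] < 3:
        raise RuntimeError("transient failure")
    return "token"

assert flaky() == "token" and attempts["n"] == 3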