# Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: py-cli-e2e-tests
on:
  schedule:
    - cron: '0 0 * * *'
  workflow_dispatch:
    inputs:
      e2e-tests:
        description: "E2E Tests to run"
        required: True
        default: '["bigquery", "dbt_redshift", "metabase", "mssql", "mysql", "redash", "snowflake", "tableau", "powerbi", "vertica", "python", "redshift", "quicksight", "datalake_s3", "postgres", "oracle", "athena", "bigquery_multiple_project", "exasol"]'
      debug:
        description: "If Debugging the Pipeline, Slack and Sonar events won't be triggered [default, true or false]. Default will trigger only on main branch."
        required: False
        default: "default"

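# DEBUG resolution: with the 'default' input, runs on main keep DEBUG=false (Slack/Sonar enabled)
# while runs on any other branch get DEBUG=true; an explicit 'true'/'false' input always wins,
# and an unset input (e.g. scheduled runs) falls back to 'false'.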
env:
  DEBUG: "${{ (inputs.debug == 'default' && github.ref == 'refs/heads/main' && 'false') || (inputs.debug == 'default' && github.ref != 'refs/heads/main' && 'true') || inputs.debug || 'false' }}"

permissions:
  id-token: write
  contents: read

jobs:
  # Needed since env is not available on the job context: https://github.com/actions/runner/issues/2372
  check-debug:
    runs-on: ubuntu-latest
    outputs:
      DEBUG: ${{ env.DEBUG }}
    steps:
      - run: echo "INPUTS_DEBUG=${{ inputs.debug }}, GITHUB_REF=${{ github.ref }}, DEBUG=$DEBUG"

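  # One matrix entry per connector: workflow_dispatch can pass a trimmed-down JSON list,
  # while scheduled runs (no inputs) fall back to the full default list via fromJSON.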
  py-cli-e2e-tests:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        e2e-test: ${{ fromJSON(inputs.e2e-tests || '["bigquery", "dbt_redshift", "metabase", "mssql", "mysql", "redash", "snowflake", "tableau", "powerbi", "vertica", "python", "redshift", "quicksight", "datalake_s3", "postgres", "oracle", "athena", "bigquery_multiple_project", "exasol"]') }}
    environment: test

    steps:
      - name: Free Disk Space (Ubuntu)
        uses: jlumbroso/free-disk-space@main
        with:
          tool-cache: false
          android: true
          dotnet: true
          haskell: true
          large-packages: false
          swap-storage: true
          docker-images: false

      - name: Checkout
        uses: actions/checkout@v4

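      # AWS credentials are only needed for the AWS-backed connectors; the role is assumed
      # through GitHub OIDC (hence the id-token: write permission), so no long-lived keys are stored.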
      - name: configure aws credentials
        if: contains('quicksight', matrix.e2e-test) || contains('datalake_s3', matrix.e2e-test) || contains('athena', matrix.e2e-test)
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: ${{ secrets.E2E_AWS_IAM_ROLE_ARN }}
          role-session-name: github-ci-aws-e2e-tests
          aws-region: ${{ secrets.E2E_AWS_REGION }}

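      # Local composite action that prepares the test environment (Python venv with the ingestion
      # package and, assumed, the docker compose stack under ./docker/development torn down in Clean Up).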
      - name: Setup Openmetadata Test Environment
        uses: ./.github/actions/setup-openmetadata-test-environment
        with:
          python-version: '3.10'

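      # The 'python' matrix entry runs the plain Python test suite with coverage;
      # every other entry runs the connector-specific CLI E2E suite below.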
      - name: Run Python Tests & record coverage
        if: matrix.e2e-test == 'python'
        id: python-e2e-test
        run: |
          source env/bin/activate
          make coverage

      - name: Run CLI E2E Python Tests & record coverage
        if: matrix.e2e-test != 'python'
        id: e2e-test
        continue-on-error: true
        env:
          E2E_TEST: ${{ matrix.e2e-test }}
          E2E_BQ_PROJECT_ID_TAXONOMY: ${{ secrets.TEST_BQ_PROJECT_ID_TAXONOMY }}
          E2E_BQ_PRIVATE_KEY: ${{ secrets.TEST_BQ_PRIVATE_KEY_E2E }}
          E2E_BQ_PROJECT_ID: ${{ secrets.TEST_BQ_PROJECT_ID }}
          E2E_BQ_PROJECT_ID2: ${{ secrets.TEST_BQ_PROJECT_ID2 }}
          E2E_BQ_PRIVATE_KEY_ID: ${{ secrets.TEST_BQ_PRIVATE_KEY_ID }}
          E2E_BQ_CLIENT_EMAIL: ${{ secrets.TEST_BQ_CLIENT_EMAIL }}
          E2E_BQ_CLIENT_ID: ${{ secrets.TEST_BQ_CLIENT_ID }}
          E2E_SNOWFLAKE_PASSWORD: ${{ secrets.TEST_SNOWFLAKE_PASSWORD_YAML }}
          E2E_SNOWFLAKE_PASSPHRASE: ${{ secrets.TEST_SNOWFLAKE_PASSPHRASE }}
          E2E_SNOWFLAKE_USERNAME: ${{ secrets.TEST_SNOWFLAKE_USERNAME }}
          E2E_SNOWFLAKE_ACCOUNT: ${{ secrets.TEST_SNOWFLAKE_ACCOUNT }}
          E2E_SNOWFLAKE_DATABASE: ${{ secrets.TEST_SNOWFLAKE_DATABASE_E2E }}
          E2E_SNOWFLAKE_WAREHOUSE: ${{ secrets.TEST_SNOWFLAKE_WAREHOUSE }}
          E2E_REDSHIFT_DBT_CATALOG_HTTP_FILE_PATH: ${{ secrets.E2E_REDSHIFT_DBT_CATALOG_HTTP_FILE_PATH }}
          E2E_REDSHIFT_DBT_MANIFEST_HTTP_FILE_PATH: ${{ secrets.E2E_REDSHIFT_DBT_MANIFEST_HTTP_FILE_PATH }}
          E2E_REDSHIFT_DBT_RUN_RESULTS_HTTP_FILE_PATH: ${{ secrets.E2E_REDSHIFT_DBT_RUN_RESULTS_HTTP_FILE_PATH }}
          E2E_REDSHIFT_HOST_PORT: ${{ secrets.E2E_REDSHIFT_HOST_PORT }}
          E2E_REDSHIFT_USERNAME: ${{ secrets.E2E_REDSHIFT_USERNAME }}
          E2E_REDSHIFT_PASSWORD: ${{ secrets.E2E_REDSHIFT_PASSWORD }}
          E2E_REDSHIFT_DATABASE: ${{ secrets.E2E_REDSHIFT_DATABASE }}
          E2E_REDSHIFT_DB: ${{ secrets.E2E_REDSHIFT_DB }}
          E2E_MSSQL_USERNAME: ${{ secrets.E2E_MSSQL_USERNAME }}
          E2E_MSSQL_PASSWORD: ${{ secrets.E2E_MSSQL_PASSWORD }}
          E2E_MSSQL_HOST: ${{ secrets.E2E_MSSQL_HOST }}
          E2E_MSSQL_DATABASE: ${{ secrets.E2E_MSSQL_DATABASE }}
          E2E_VERTICA_USERNAME: ${{ secrets.E2E_VERTICA_USERNAME }}
          E2E_VERTICA_PASSWORD: ${{ secrets.E2E_VERTICA_PASSWORD }}
          E2E_VERTICA_HOST_PORT: ${{ secrets.E2E_VERTICA_HOST_PORT }}
          E2E_TABLEAU_PAT_NAME: ${{ secrets.E2E_TABLEAU_PAT_NAME }}
          E2E_TABLEAU_PAT_SECRET: ${{ secrets.E2E_TABLEAU_PAT_SECRET }}
          E2E_TABLEAU_HOST_PORT: ${{ secrets.E2E_TABLEAU_HOST_PORT }}
          E2E_TABLEAU_SITE: ${{ secrets.E2E_TABLEAU_SITE }}
          E2E_REDASH_HOST_PORT: ${{ secrets.E2E_REDASH_HOST_PORT }}
          E2E_REDASH_USERNAME: ${{ secrets.E2E_REDASH_USERNAME }}
          E2E_REDASH_API_KEY: ${{ secrets.E2E_REDASH_API_KEY }}
          E2E_POWERBI_CLIENT_SECRET: ${{ secrets.E2E_POWERBI_CLIENT_SECRET }}
          E2E_POWERBI_CLIENT_ID: ${{ secrets.E2E_POWERBI_CLIENT_ID }}
          E2E_POWERBI_TENANT_ID: ${{ secrets.E2E_POWERBI_TENANT_ID }}
          E2E_METABASE_HOST_PORT: ${{ secrets.TEST_METABASE_HOST_PORT }}
          E2E_METABASE_PASSWORD: ${{ secrets.TEST_METABASE_PASSWORD }}
          E2E_METABASE_USERNAME: ${{ secrets.TEST_METABASE_USERNAME }}
          E2E_HIVE_HOST_PORT: ${{ secrets.E2E_HIVE_HOST_PORT }}
          E2E_HIVE_AUTH: ${{ secrets.E2E_HIVE_AUTH }}
          E2E_QUICKSIGHT_AWS_ACCOUNTID: ${{ secrets.E2E_QUICKSIGHT_AWS_ACCOUNTID }}
          E2E_QUICKSIGHT_REGION: ${{ secrets.E2E_QUICKSIGHT_REGION }}
          E2E_DATALAKE_S3_BUCKET_NAME: ${{ secrets.E2E_DATALAKE_S3_BUCKET_NAME }}
          E2E_DATALAKE_S3_PREFIX: ${{ secrets.E2E_DATALAKE_S3_PREFIX }}
          E2E_DATALAKE_S3_REGION: ${{ secrets.E2E_AWS_REGION }}
          E2E_POSTGRES_USERNAME: ${{ secrets.E2E_POSTGRES_USERNAME }}
          E2E_POSTGRES_PASSWORD: ${{ secrets.E2E_POSTGRES_PASSWORD }}
          E2E_POSTGRES_HOSTPORT: ${{ secrets.TEST_POSTGRES_HOST_PORT }}
          E2E_POSTGRES_DATABASE: ${{ secrets.E2E_POSTGRES_DATABASE }}
          E2E_ORACLE_HOST_PORT: ${{ secrets.E2E_ORACLE_HOST_PORT }}
          E2E_ORACLE_USERNAME: ${{ secrets.E2E_ORACLE_USERNAME }}
          E2E_ORACLE_PASSWORD: ${{ secrets.E2E_ORACLE_PASSWORD }}
          E2E_ORACLE_SERVICE_NAME: ${{ secrets.E2E_ORACLE_SERVICE_NAME }}
          E2E_ATHENA_REGION: ${{ secrets.E2E_AWS_REGION }}
          E2E_ATHENA_S3STAGINGDIR: ${{ secrets.E2E_ATHENA_S3STAGINGDIR }}
          E2E_ATHENA_WORKGROUP: ${{ secrets.E2E_ATHENA_WORKGROUP }}
        run: |
          source env/bin/activate
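          # Drop a sitecustomize.py into the venv's site-packages so that every Python
          # subprocess spawned during the CLI run starts coverage automatically
          # (coverage.process_startup() picks up COVERAGE_PROCESS_START); the try/except
          # makes it a no-op when coverage is not installed.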
          export SITE_CUSTOMIZE_PATH=$(python -c "import site; import os; from pathlib import Path; print(os.path.relpath(site.getsitepackages()[0], str(Path.cwd())))")/sitecustomize.py
          echo "import os" >> $SITE_CUSTOMIZE_PATH
          echo "try:" >> $SITE_CUSTOMIZE_PATH
          echo "    import coverage" >> $SITE_CUSTOMIZE_PATH
          echo "    os.environ['COVERAGE_PROCESS_START'] = 'ingestion/pyproject.toml'" >> $SITE_CUSTOMIZE_PATH
          echo "    coverage.process_startup()" >> $SITE_CUSTOMIZE_PATH
          echo "except ImportError:" >> $SITE_CUSTOMIZE_PATH
          echo "    pass" >> $SITE_CUSTOMIZE_PATH
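          # Run the connector's CLI E2E suite under coverage, emit a per-connector JUnit report,
          # then merge the data into .coverage.$E2E_TEST for the Sonar job to pick up later;
          # the textual report is informational only, hence the trailing '|| true'.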
          coverage run --rcfile ingestion/pyproject.toml -a --branch -m pytest -c ingestion/pyproject.toml --junitxml=ingestion/junit/test-results-$E2E_TEST.xml --ignore=ingestion/tests/unit/source ingestion/tests/cli_e2e/test_cli_$E2E_TEST.py
          coverage combine --data-file=.coverage.$E2E_TEST --rcfile=ingestion/pyproject.toml --keep -a .coverage*
          coverage report --rcfile ingestion/pyproject.toml --data-file .coverage.$E2E_TEST || true

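      # Coverage and JUnit artifacts are only published for successful, non-debug runs;
      # the sonar-cloud-coverage-upload job downloads and combines them afterwards.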
      - name: Upload coverage artifact for Python tests
        if: matrix.e2e-test == 'python' && steps.python-e2e-test.outcome == 'success' && env.DEBUG == 'false'
        uses: actions/upload-artifact@v4
        with:
          name: coverage-${{ matrix.e2e-test }}
          path: .coverage

      - name: Upload coverage artifact for CLI E2E tests
        if: matrix.e2e-test != 'python' && steps.e2e-test.outcome == 'success' && env.DEBUG == 'false'
        uses: actions/upload-artifact@v4
        with:
          name: coverage-${{ matrix.e2e-test }}
          path: .coverage.${{ matrix.e2e-test }}

      - name: Upload tests artifact
        if: (steps.e2e-test.outcome == 'success' || steps.python-e2e-test.outcome == 'success') && env.DEBUG == 'false'
        uses: actions/upload-artifact@v4
        with:
          name: tests-${{ matrix.e2e-test }}
          path: ingestion/junit/test-results-*.xml

      - name: Clean Up
        run: |
          cd ./docker/development
          docker compose down --remove-orphans
          sudo rm -rf ${PWD}/docker-volume

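      # Only one of the two test steps runs per matrix entry, so the other reports 'skipped'
      # (not 'success'): requiring both to be non-successful means "the step that actually ran failed".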
      - name: Slack on Failure
        if: steps.e2e-test.outcome != 'success' && steps.python-e2e-test.outcome != 'success' && env.DEBUG == 'false'
        uses: slackapi/slack-github-action@v1.23.0
        with:
          payload: |
            {
              "text": "🔥 Failed E2E Test for: ${{ matrix.e2e-test }} 🔥\nLogs: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
            }
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.E2E_SLACK_WEBHOOK }}
          SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK

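      # The E2E step uses continue-on-error so coverage, artifacts and the Slack alert still run
      # on failure; this step then re-fails the job so the matrix entry is marked red.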
      - name: Force failure
        if: steps.e2e-test.outcome != 'success' && steps.python-e2e-test.outcome != 'success'
        run: |
          exit 1

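  # Downloads the per-matrix coverage and JUnit artifacts, merges them and pushes the result
  # to SonarCloud; skipped entirely when DEBUG is true.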
  sonar-cloud-coverage-upload:
    runs-on: ubuntu-latest
    needs:
      - py-cli-e2e-tests
      - check-debug
    if: needs.check-debug.outputs.DEBUG == 'false'
    steps:

      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Python 3.10
        uses: actions/setup-python@v5
        with:
          python-version: '3.10'

      - name: Install Ubuntu dependencies
        run: |
          sudo apt-get update && sudo apt-get install -y unixodbc-dev python3-venv librdkafka-dev gcc libsasl2-dev build-essential libssl-dev libffi-dev \
            unixodbc-dev libevent-dev python3-dev libkrb5-dev

      - name: Install coverage dependencies
        run: |
          python3 -m venv env
          source env/bin/activate
          make install_all install_test

      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: artifacts

      - name: Generate report
        run: |
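          # Collect the downloaded artifacts: coverage data files go to the repo root,
          # JUnit XML reports go under ingestion/junit, then everything is combined into a
          # single XML coverage report.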
          for folder in artifacts/coverage-*; do
            cp -rT $folder/ . ;
          done
          mkdir ingestion/junit
          for folder in artifacts/tests-*; do
            cp -rT $folder/ ingestion/junit ;
          done
          source env/bin/activate
          coverage combine --rcfile=ingestion/pyproject.toml --keep -a .coverage*
          coverage xml --rcfile=ingestion/pyproject.toml --data-file=.coverage -o ingestion/coverage.xml
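          # Coverage was recorded against the installed site-packages copy of the code; rewrite
          # those paths first to src/ and then to the /github/workspace path used inside the
          # SonarCloud scanner container so the report maps back to the source tree.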
          sed -e "s/$(python -c "import site; import os; from pathlib import Path; print(os.path.relpath(site.getsitepackages()[0], str(Path.cwd())).replace('/','\/'))")/src/g" ingestion/coverage.xml >> ingestion/ci-coverage.xml
          sed -i 's/src\/metadata/\/github\/workspace\/ingestion\/src\/metadata/g' ingestion/ci-coverage.xml
        shell: bash

      - name: Push Results to Sonar
        uses: sonarsource/sonarcloud-github-action@master
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.INGESTION_SONAR_SECRET }}
        with:
          projectBaseDir: ingestion/