# Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Nightly CLI end-to-end test workflow: spins up a local OpenMetadata server,
# runs one connector E2E suite per matrix entry, records coverage, and pushes
# the combined coverage report to SonarCloud.
name: py-cli-e2e-tests

on:
  schedule:
    - cron: '0 0 * * *'  # nightly at 00:00 UTC
  workflow_dispatch:

permissions:
  id-token: write  # required for the AWS OIDC role assumption below
  contents: read

jobs:
  py-cli-e2e-tests:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false  # let every connector suite finish even if one fails
      matrix:
        e2e-test: ['bigquery', 'dbt_redshift', 'metabase', 'mssql', 'mysql', 'redash', 'snowflake', 'tableau', 'powerbi', 'vertica', 'python', 'redshift', 'hive', 'quicksight', 'datalake_s3', 'postgres']
    environment: test
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Set up JDK 11
        uses: actions/setup-java@v3
        with:
          java-version: '11'
          distribution: 'adopt'

      - name: Set up Python 3.9
        uses: actions/setup-python@v4
        with:
          # Quoted so YAML does not coerce the version to a float
          python-version: '3.9'

      # Only the AWS-backed suites need credentials.
      # Fixed: the original used contains('quicksight', matrix.e2e-test),
      # which reverses contains(search, item) and only worked by accident
      # for exact matches; explicit equality is what was intended.
      - name: configure aws credentials
        if: matrix.e2e-test == 'quicksight' || matrix.e2e-test == 'datalake_s3'
        uses: aws-actions/configure-aws-credentials@v1
        with:
          role-to-assume: ${{ secrets.E2E_AWS_IAM_ROLE_ARN }}
          role-session-name: github-ci-aws-e2e-tests
          aws-region: ${{ secrets.E2E_AWS_REGION }}

      - name: Install Ubuntu dependencies
        run: |
          sudo apt-get update && sudo apt-get install -y unixodbc-dev python3-venv librdkafka-dev gcc libsasl2-dev build-essential libssl-dev libffi-dev \
          unixodbc-dev libevent-dev python3-dev libkrb5-dev

      - name: Generate models
        run: |
          python3 -m venv env
          source env/bin/activate
          sudo make install_antlr_cli
          make install_dev generate

      - name: Install open-metadata dependencies
        run: |
          source env/bin/activate
          make install_all install_test

      - name: Start Server and Ingest Sample Data
        uses: nick-fields/retry@v2.8.3
        env:
          INGESTION_DEPENDENCY: "mysql,elasticsearch"
        with:
          timeout_minutes: 30
          max_attempts: 2
          retry_on: error
          command: ./docker/run_local_docker.sh -m no-ui

      - name: Run Python Tests & record coverage
        if: matrix.e2e-test == 'python'
        id: python-e2e-test
        run: |
          source env/bin/activate
          make coverage

      - name: Run CLI E2E Python Tests & record coverage
        if: matrix.e2e-test != 'python'
        id: e2e-test
        # Keep the job alive so coverage/junit uploads and Slack alerting
        # below still run; "Force failure" re-fails the job at the end.
        continue-on-error: true
        env:
          E2E_TEST: ${{ matrix.e2e-test }}
          E2E_BQ_PROJECT_ID_TAXONOMY: ${{ secrets.TEST_BQ_PROJECT_ID_TAXONOMY }}
          E2E_BQ_PRIVATE_KEY: ${{ secrets.TEST_BQ_PRIVATE_KEY_E2E }}
          E2E_BQ_PROJECT_ID: ${{ secrets.TEST_BQ_PROJECT_ID }}
          E2E_BQ_PRIVATE_KEY_ID: ${{ secrets.TEST_BQ_PRIVATE_KEY_ID }}
          E2E_BQ_CLIENT_EMAIL: ${{ secrets.TEST_BQ_CLIENT_EMAIL }}
          E2E_BQ_CLIENT_ID: ${{ secrets.TEST_BQ_CLIENT_ID }}
          E2E_SNOWFLAKE_PASSWORD: ${{ secrets.TEST_SNOWFLAKE_PASSWORD }}
          E2E_SNOWFLAKE_USERNAME: ${{ secrets.TEST_SNOWFLAKE_USERNAME }}
          E2E_SNOWFLAKE_ACCOUNT: ${{ secrets.TEST_SNOWFLAKE_ACCOUNT }}
          E2E_SNOWFLAKE_DATABASE: ${{ secrets.TEST_SNOWFLAKE_DATABASE_E2E }}
          E2E_SNOWFLAKE_WAREHOUSE: ${{ secrets.TEST_SNOWFLAKE_WAREHOUSE }}
          E2E_REDSHIFT_DBT_CATALOG_HTTP_FILE_PATH: ${{ secrets.E2E_REDSHIFT_DBT_CATALOG_HTTP_FILE_PATH }}
          E2E_REDSHIFT_DBT_MANIFEST_HTTP_FILE_PATH: ${{ secrets.E2E_REDSHIFT_DBT_MANIFEST_HTTP_FILE_PATH }}
          E2E_REDSHIFT_DBT_RUN_RESULTS_HTTP_FILE_PATH: ${{ secrets.E2E_REDSHIFT_DBT_RUN_RESULTS_HTTP_FILE_PATH }}
          E2E_REDSHIFT_HOST_PORT: ${{ secrets.E2E_REDSHIFT_HOST_PORT }}
          E2E_REDSHIFT_USERNAME: ${{ secrets.E2E_REDSHIFT_USERNAME }}
          E2E_REDSHIFT_PASSWORD: ${{ secrets.E2E_REDSHIFT_PASSWORD }}
          E2E_REDSHIFT_DATABASE: ${{ secrets.E2E_REDSHIFT_DATABASE }}
          E2E_REDSHIFT_DB: ${{ secrets.E2E_REDSHIFT_DB }}
          E2E_MSSQL_USERNAME: ${{ secrets.E2E_MSSQL_USERNAME }}
          E2E_MSSQL_PASSWORD: ${{ secrets.E2E_MSSQL_PASSWORD }}
          E2E_MSSQL_HOST: ${{ secrets.E2E_MSSQL_HOST }}
          E2E_MSSQL_DATABASE: ${{ secrets.E2E_MSSQL_DATABASE }}
          E2E_VERTICA_USERNAME: ${{ secrets.E2E_VERTICA_USERNAME }}
          E2E_VERTICA_PASSWORD: ${{ secrets.E2E_VERTICA_PASSWORD }}
          E2E_VERTICA_HOST_PORT: ${{ secrets.E2E_VERTICA_HOST_PORT }}
          E2E_TABLEAU_USERNAME: ${{ secrets.E2E_TABLEAU_USERNAME }}
          E2E_TABLEAU_PASSWORD: ${{ secrets.E2E_TABLEAU_PASSWORD }}
          E2E_TABLEAU_HOST_PORT: ${{ secrets.E2E_TABLEAU_HOST_PORT }}
          E2E_TABLEAU_SITE: ${{ secrets.E2E_TABLEAU_SITE }}
          E2E_REDASH_HOST_PORT: ${{ secrets.E2E_REDASH_HOST_PORT }}
          E2E_REDASH_USERNAME: ${{ secrets.E2E_REDASH_USERNAME }}
          E2E_REDASH_API_KEY: ${{ secrets.E2E_REDASH_API_KEY }}
          E2E_POWERBI_CLIENT_SECRET: ${{ secrets.E2E_POWERBI_CLIENT_SECRET }}
          E2E_POWERBI_CLIENT_ID: ${{ secrets.E2E_POWERBI_CLIENT_ID }}
          E2E_POWERBI_TENANT_ID: ${{ secrets.E2E_POWERBI_TENANT_ID }}
          E2E_METABASE_HOST_PORT: ${{ secrets.TEST_METABASE_HOST_PORT }}
          E2E_METABASE_PASSWORD: ${{ secrets.TEST_METABASE_PASSWORD }}
          E2E_METABASE_USERNAME: ${{ secrets.TEST_METABASE_USERNAME }}
          E2E_HIVE_HOST_PORT: ${{ secrets.E2E_HIVE_HOST_PORT }}
          E2E_HIVE_AUTH: ${{ secrets.E2E_HIVE_AUTH }}
          E2E_QUICKSIGHT_AWS_ACCOUNTID: ${{ secrets.E2E_QUICKSIGHT_AWS_ACCOUNTID }}
          E2E_QUICKSIGHT_REGION: ${{ secrets.E2E_QUICKSIGHT_REGION }}
          E2E_DATALAKE_S3_BUCKET_NAME: ${{ secrets.E2E_DATALAKE_S3_BUCKET_NAME }}
          E2E_DATALAKE_S3_PREFIX: ${{ secrets.E2E_DATALAKE_S3_PREFIX }}
          E2E_DATALAKE_S3_REGION: ${{ secrets.E2E_DATALAKE_S3_REGION }}
          E2E_POSTGRES_USERNAME: ${{ secrets.E2E_POSTGRES_USERNAME }}
          E2E_POSTGRES_PASSWORD: ${{ secrets.E2E_POSTGRES_PASSWORD }}
          E2E_POSTGRES_HOSTPORT: ${{ secrets.E2E_POSTGRES_HOSTPORT }}
          E2E_POSTGRES_DATABASE: ${{ secrets.E2E_POSTGRES_DATABASE }}
        run: |
          source env/bin/activate
          # Install a sitecustomize.py hook so coverage also tracks the
          # subprocesses spawned by the ingestion workflows.
          export SITE_CUSTOMIZE_PATH=$(python -c "import site; import os; from pathlib import Path; print(os.path.relpath(site.getsitepackages()[0], str(Path.cwd())))")/sitecustomize.py
          echo "import os" >> $SITE_CUSTOMIZE_PATH
          echo "try:" >> $SITE_CUSTOMIZE_PATH
          echo " import coverage" >> $SITE_CUSTOMIZE_PATH
          echo " os.environ['COVERAGE_PROCESS_START'] = 'ingestion/.coveragerc'" >> $SITE_CUSTOMIZE_PATH
          echo " coverage.process_startup()" >> $SITE_CUSTOMIZE_PATH
          echo "except ImportError:" >> $SITE_CUSTOMIZE_PATH
          echo " pass" >> $SITE_CUSTOMIZE_PATH
          sed -i "5i concurrency = multiprocessing" ingestion/.coveragerc
          coverage run --rcfile ingestion/.coveragerc -a --branch -m pytest -c ingestion/setup.cfg --junitxml=ingestion/junit/test-results-$E2E_TEST.xml --ignore=ingestion/tests/unit/source ingestion/tests/cli_e2e/test_cli_$E2E_TEST.py
          coverage combine --data-file=.coverage.$E2E_TEST --rcfile=ingestion/.coveragerc --keep -a .coverage*
          coverage report --rcfile ingestion/.coveragerc --data-file .coverage.$E2E_TEST || true

      - name: Upload coverage artifact for Python tests
        if: matrix.e2e-test == 'python' && steps.python-e2e-test.outcome == 'success'
        uses: actions/upload-artifact@v3
        with:
          name: coverage-${{ matrix.e2e-test }}
          path: .coverage

      - name: Upload coverage artifact for CLI E2E tests
        if: matrix.e2e-test != 'python' && steps.e2e-test.outcome == 'success'
        uses: actions/upload-artifact@v3
        with:
          name: coverage-${{ matrix.e2e-test }}
          path: .coverage.${{ matrix.e2e-test }}

      - name: Upload tests artifact
        if: steps.e2e-test.outcome == 'success' || steps.python-e2e-test.outcome == 'success'
        uses: actions/upload-artifact@v3
        with:
          name: tests-${{ matrix.e2e-test }}
          path: ingestion/junit/test-results-*.xml

      - name: Clean Up
        run: |
          cd ./docker/development
          docker compose down --remove-orphans
          sudo rm -rf ${PWD}/docker-volume

      - name: Slack on Failure
        if: steps.e2e-test.outcome != 'success' && steps.python-e2e-test.outcome != 'success'
        uses: slackapi/slack-github-action@v1.23.0
        with:
          payload: |
            {
              "text": "🔥 Failed E2E Test for: ${{ matrix.e2e-test }} 🔥"
            }
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.E2E_SLACK_WEBHOOK }}
          SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK

      # Re-fail the job after artifact upload/alerting, since the test step
      # ran with continue-on-error: true.
      - name: Force failure
        if: steps.e2e-test.outcome != 'success' && steps.python-e2e-test.outcome != 'success'
        run: |
          exit 1

  sonar-cloud-coverage-upload:
    runs-on: ubuntu-latest
    needs: py-cli-e2e-tests
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Set up Python 3.9
        uses: actions/setup-python@v4
        with:
          # Quoted so YAML does not coerce the version to a float
          python-version: '3.9'

      - name: Install Ubuntu dependencies
        run: |
          sudo apt-get update && sudo apt-get install -y unixodbc-dev python3-venv librdkafka-dev gcc libsasl2-dev build-essential libssl-dev libffi-dev \
          unixodbc-dev libevent-dev python3-dev libkrb5-dev

      - name: Install coverage dependencies
        run: |
          python3 -m venv env
          source env/bin/activate
          make install_all install_test

      - name: Download all artifacts
        uses: actions/download-artifact@v3
        with:
          path: artifacts

      - name: Generate report
        run: |
          for folder in artifacts/coverage-*; do cp -rT $folder/ . ; done
          mkdir ingestion/junit
          for folder in artifacts/tests-*; do cp -rT $folder/ ingestion/junit ; done
          source env/bin/activate
          coverage combine --rcfile=ingestion/.coveragerc --keep -a .coverage*
          coverage xml --rcfile=ingestion/.coveragerc --data-file=.coverage -o ingestion/coverage.xml
          # Rewrite site-packages paths back to repo-relative src paths, then
          # to the absolute workspace paths Sonar expects.
          sed -e "s/$(python -c "import site; import os; from pathlib import Path; print(os.path.relpath(site.getsitepackages()[0], str(Path.cwd())).replace('/','\/'))")/src/g" ingestion/coverage.xml >> ingestion/ci-coverage.xml
          sed -i 's/src\/metadata/\/github\/workspace\/ingestion\/src\/metadata/g' ingestion/ci-coverage.xml
        shell: bash

      - name: Push Results to Sonar
        uses: sonarsource/sonarcloud-github-action@master
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.INGESTION_SONAR_SECRET }}
        with:
          projectBaseDir: ingestion/