ci: tweak CI to decrease wait time for devs (#8945)

This commit is contained in:
Aseem Bansal 2023-10-05 09:31:32 +05:30 committed by GitHub
parent 817c371fbf
commit 2bc685d3b9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 24 additions and 15 deletions

View File

@ -27,8 +27,8 @@ jobs:
command:
[
# metadata-ingestion and airflow-plugin each have dedicated build jobs
"./gradlew build -x :metadata-ingestion:build -x :metadata-ingestion:check -x docs-website:build -x :metadata-integration:java:spark-lineage:test -x :metadata-io:test -x :metadata-ingestion-modules:airflow-plugin:build -x :metadata-ingestion-modules:airflow-plugin:check -x :datahub-frontend:build -x :datahub-web-react:build --parallel",
"./gradlew :datahub-frontend:build :datahub-web-react:build --parallel",
"except_metadata_ingestion",
"frontend"
]
timezone:
[
@ -53,9 +53,15 @@ jobs:
with:
python-version: "3.10"
cache: pip
- name: Gradle build (and test)
- name: Gradle build (and test) for metadata ingestion
# we only need the timezone runs for frontend tests
if: ${{ matrix.command == 'except_metadata_ingestion' && matrix.timezone == 'America/New_York' }}
run: |
${{ matrix.command }}
./gradlew build -x :metadata-ingestion:build -x :metadata-ingestion:check -x docs-website:build -x :metadata-integration:java:spark-lineage:test -x :metadata-io:test -x :metadata-ingestion-modules:airflow-plugin:build -x :metadata-ingestion-modules:airflow-plugin:check -x :datahub-frontend:build -x :datahub-web-react:build --parallel
- name: Gradle build (and test) for frontend
if: ${{ matrix.command == 'frontend' }}
run: |
./gradlew :datahub-frontend:build :datahub-web-react:build --parallel
env:
NODE_OPTIONS: "--max-old-space-size=3072"
- uses: actions/upload-artifact@v3

View File

@ -34,7 +34,6 @@ jobs:
python-version: ["3.7", "3.10"]
command:
[
"lint",
"testQuick",
"testIntegrationBatch0",
"testIntegrationBatch1",
@ -54,6 +53,9 @@ jobs:
run: ./metadata-ingestion/scripts/install_deps.sh
- name: Install package
run: ./gradlew :metadata-ingestion:installPackageOnly
- name: Run lint along with testQuick
if: ${{ matrix.command == 'testQuick' }}
run: ./gradlew :metadata-ingestion:lint
- name: Run metadata-ingestion tests
run: ./gradlew :metadata-ingestion:${{ matrix.command }}
- name: Debug info
@ -65,7 +67,6 @@ jobs:
docker image ls
docker system df
- uses: actions/upload-artifact@v3
if: ${{ always() && matrix.command != 'lint' }}
with:
name: Test Results (metadata ingestion ${{ matrix.python-version }})
path: |
@ -73,7 +74,7 @@ jobs:
**/build/test-results/test/**
**/junit.*.xml
- name: Upload coverage to Codecov
if: ${{ always() && matrix.python-version == '3.10' && matrix.command != 'lint' }}
if: ${{ always() && matrix.python-version == '3.10' }}
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}

View File

@ -3,11 +3,14 @@ import sys
from typing import Any, Dict
from unittest import mock
import pytest
from freezegun import freeze_time
from datahub.ingestion.run.pipeline import Pipeline
from tests.test_helpers import mce_helpers
pytestmark = pytest.mark.integration_batch_2
FROZEN_TIME = "2022-02-03 07:00:00"

View File

@ -19,7 +19,7 @@ from datahub.ingestion.source.powerbi.m_query import parser, resolver, tree_func
from datahub.ingestion.source.powerbi.m_query.resolver import DataPlatformTable, Lineage
from datahub.utilities.sqlglot_lineage import ColumnLineageInfo, DownstreamColumnRef
pytestmark = pytest.mark.slow
pytestmark = pytest.mark.integration_batch_2
M_QUERIES = [
'let\n Source = Snowflake.Databases("bu10758.ap-unknown-2.fakecomputing.com","PBI_TEST_WAREHOUSE_PROD",[Role="PBI_TEST_MEMBER"]),\n PBI_TEST_Database = Source{[Name="PBI_TEST",Kind="Database"]}[Data],\n TEST_Schema = PBI_TEST_Database{[Name="TEST",Kind="Schema"]}[Data],\n TESTTABLE_Table = TEST_Schema{[Name="TESTTABLE",Kind="Table"]}[Data]\nin\n TESTTABLE_Table',

View File

@ -21,7 +21,7 @@ from datahub.ingestion.source.powerbi.rest_api_wrapper.data_classes import (
)
from tests.test_helpers import mce_helpers
pytestmark = pytest.mark.slow
pytestmark = pytest.mark.integration_batch_2
FROZEN_TIME = "2022-02-03 07:00:00"

View File

@ -30,6 +30,8 @@ from datahub.ingestion.source.snowflake.snowflake_report import SnowflakeV2Repor
from tests.integration.snowflake.common import FROZEN_TIME, default_query_results
from tests.test_helpers import mce_helpers
pytestmark = pytest.mark.integration_batch_2
def random_email():
return (
@ -55,7 +57,6 @@ def random_cloud_region():
)
@pytest.mark.integration
def test_snowflake_basic(pytestconfig, tmp_path, mock_time, mock_datahub_graph):
test_resources_dir = pytestconfig.rootpath / "tests/integration/snowflake"
@ -183,7 +184,6 @@ def test_snowflake_basic(pytestconfig, tmp_path, mock_time, mock_datahub_graph):
@freeze_time(FROZEN_TIME)
@pytest.mark.integration
def test_snowflake_private_link(pytestconfig, tmp_path, mock_time, mock_datahub_graph):
test_resources_dir = pytestconfig.rootpath / "tests/integration/snowflake"

View File

@ -757,7 +757,7 @@ def test_tableau_no_verify():
@freeze_time(FROZEN_TIME)
@pytest.mark.slow
@pytest.mark.integration_batch_2
def test_tableau_signout_timeout(pytestconfig, tmp_path, mock_datahub_graph):
enable_logging()
output_file_name: str = "tableau_signout_timeout_mces.json"

View File

@ -13,6 +13,8 @@ from datahub.ingestion.source.sql.trino import TrinoConfig
from tests.test_helpers import fs_helpers, mce_helpers
from tests.test_helpers.docker_helpers import wait_for_port
pytestmark = pytest.mark.integration_batch_1
FROZEN_TIME = "2021-09-23 12:00:00"
data_platform = "trino"
@ -51,7 +53,6 @@ def loaded_trino(trino_runner):
@freeze_time(FROZEN_TIME)
@pytest.mark.integration
@pytest.mark.xfail
def test_trino_ingest(
loaded_trino, test_resources_dir, pytestconfig, tmp_path, mock_time
@ -111,7 +112,6 @@ def test_trino_ingest(
@freeze_time(FROZEN_TIME)
@pytest.mark.integration
def test_trino_hive_ingest(
loaded_trino, test_resources_dir, pytestconfig, tmp_path, mock_time
):
@ -167,7 +167,6 @@ def test_trino_hive_ingest(
@freeze_time(FROZEN_TIME)
@pytest.mark.integration
def test_trino_instance_ingest(
loaded_trino, test_resources_dir, pytestconfig, tmp_path, mock_time
):