Mirror of https://github.com/datahub-project/datahub.git (synced 2025-09-01 13:23:09 +00:00)
ci: tweak ci to decrease wait time of devs (#8945)
commit 2bc685d3b9 (parent 817c371fbf)
.github/workflows/build-and-test.yml (vendored): 14 lines changed
@@ -27,8 +27,8 @@ jobs:
         command:
           [
             # metadata-ingestion and airflow-plugin each have dedicated build jobs
-            "./gradlew build -x :metadata-ingestion:build -x :metadata-ingestion:check -x docs-website:build -x :metadata-integration:java:spark-lineage:test -x :metadata-io:test -x :metadata-ingestion-modules:airflow-plugin:build -x :metadata-ingestion-modules:airflow-plugin:check -x :datahub-frontend:build -x :datahub-web-react:build --parallel",
-            "./gradlew :datahub-frontend:build :datahub-web-react:build --parallel",
+            "except_metadata_ingestion",
+            "frontend"
           ]
         timezone:
           [
@@ -53,9 +53,15 @@ jobs:
         with:
           python-version: "3.10"
           cache: pip
-      - name: Gradle build (and test)
+      - name: Gradle build (and test) for metadata ingestion
+        # we only need the timezone runs for frontend tests
+        if: ${{ matrix.command == 'except_metadata_ingestion' && matrix.timezone == 'America/New_York' }}
         run: |
-          ${{ matrix.command }}
+          ./gradlew build -x :metadata-ingestion:build -x :metadata-ingestion:check -x docs-website:build -x :metadata-integration:java:spark-lineage:test -x :metadata-io:test -x :metadata-ingestion-modules:airflow-plugin:build -x :metadata-ingestion-modules:airflow-plugin:check -x :datahub-frontend:build -x :datahub-web-react:build --parallel
+      - name: Gradle build (and test) for frontend
+        if: ${{ matrix.command == 'frontend' }}
+        run: |
+          ./gradlew :datahub-frontend:build :datahub-web-react:build --parallel
         env:
           NODE_OPTIONS: "--max-old-space-size=3072"
       - uses: actions/upload-artifact@v3
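The pattern these two hunks introduce, condensed into a minimal self-contained sketch (the job name, the checkout step, and the "UTC" timezone value are illustrative assumptions; "America/New_York" and the command labels come from the diff): the matrix now carries short labels instead of full gradle command lines, and `if:` guards decide which build each matrix cell actually runs, so the heavy backend build is no longer repeated for every timezone.

    name: build-sketch
    on: [push]
    jobs:
      build:
        runs-on: ubuntu-latest
        strategy:
          matrix:
            command: ["except_metadata_ingestion", "frontend"]
            # "America/New_York" appears in the if-expression above; "UTC" is assumed here
            timezone: ["UTC", "America/New_York"]
        steps:
          - uses: actions/checkout@v3
          # Backend build: runs in exactly one matrix cell instead of once per timezone.
          - name: Gradle build (and test) for metadata ingestion
            if: ${{ matrix.command == 'except_metadata_ingestion' && matrix.timezone == 'America/New_York' }}
            run: ./gradlew build -x :metadata-ingestion:build --parallel  # exclude list abbreviated
          # Frontend build: the only part that needs both timezone runs
          # (how the timezone is applied to the tests is outside this sketch).
          - name: Gradle build (and test) for frontend
            if: ${{ matrix.command == 'frontend' }}
            run: ./gradlew :datahub-frontend:build :datahub-web-react:build --parallel

Of the four matrix cells this produces, one runs the backend build, two run the frontend build under different timezones, and one is effectively a no-op, which is considerably cheaper than running the full backend build in every timezone.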
.github/workflows/metadata-ingestion.yml (vendored): 7 lines changed
@@ -34,7 +34,6 @@ jobs:
         python-version: ["3.7", "3.10"]
         command:
           [
-            "lint",
             "testQuick",
             "testIntegrationBatch0",
             "testIntegrationBatch1",
@@ -54,6 +53,9 @@ jobs:
         run: ./metadata-ingestion/scripts/install_deps.sh
       - name: Install package
         run: ./gradlew :metadata-ingestion:installPackageOnly
+      - name: Run lint alongwith testQuick
+        if: ${{ matrix.command == 'testQuick' }}
+        run: ./gradlew :metadata-ingestion:lint
       - name: Run metadata-ingestion tests
         run: ./gradlew :metadata-ingestion:${{ matrix.command }}
       - name: Debug info
@@ -65,7 +67,6 @@ jobs:
           docker image ls
           docker system df
       - uses: actions/upload-artifact@v3
-        if: ${{ always() && matrix.command != 'lint' }}
         with:
           name: Test Results (metadata ingestion ${{ matrix.python-version }})
           path: |
@@ -73,7 +74,7 @@ jobs:
             **/build/test-results/test/**
             **/junit.*.xml
       - name: Upload coverage to Codecov
-        if: ${{ always() && matrix.python-version == '3.10' && matrix.command != 'lint' }}
+        if: ${{ always() && matrix.python-version == '3.10' }}
         uses: codecov/codecov-action@v3
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
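Taken together, these hunks remove one matrix cell per Python version and fold lint into the quick-test cell. Condensed (a sketch that assumes the surrounding checkout, Python setup, and dependency-install steps are unchanged, and omits the artifact and coverage steps whose simplified guards appear in the hunks above):

        strategy:
          matrix:
            python-version: ["3.7", "3.10"]
            # "lint" no longer has a matrix slot of its own;
            # remaining commands from the original matrix are omitted here
            command: ["testQuick", "testIntegrationBatch0", "testIntegrationBatch1"]
        steps:
          - name: Install package
            run: ./gradlew :metadata-ingestion:installPackageOnly
          # lint piggybacks on the testQuick cell instead of occupying its own runner
          - name: Run lint alongwith testQuick
            if: ${{ matrix.command == 'testQuick' }}
            run: ./gradlew :metadata-ingestion:lint
          - name: Run metadata-ingestion tests
            run: ./gradlew :metadata-ingestion:${{ matrix.command }}

The `matrix.command != 'lint'` clauses in the upload-artifact and Codecov conditions existed only to skip those steps for the lint cell, so they disappear along with it.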
@@ -3,11 +3,14 @@ import sys
 from typing import Any, Dict
 from unittest import mock
 
+import pytest
 from freezegun import freeze_time
 
 from datahub.ingestion.run.pipeline import Pipeline
 from tests.test_helpers import mce_helpers
 
+pytestmark = pytest.mark.integration_batch_2
+
 FROZEN_TIME = "2022-02-03 07:00:00"
 
 
@@ -19,7 +19,7 @@ from datahub.ingestion.source.powerbi.m_query import parser, resolver, tree_func
 from datahub.ingestion.source.powerbi.m_query.resolver import DataPlatformTable, Lineage
 from datahub.utilities.sqlglot_lineage import ColumnLineageInfo, DownstreamColumnRef
 
-pytestmark = pytest.mark.slow
+pytestmark = pytest.mark.integration_batch_2
 
 M_QUERIES = [
     'let\n Source = Snowflake.Databases("bu10758.ap-unknown-2.fakecomputing.com","PBI_TEST_WAREHOUSE_PROD",[Role="PBI_TEST_MEMBER"]),\n PBI_TEST_Database = Source{[Name="PBI_TEST",Kind="Database"]}[Data],\n TEST_Schema = PBI_TEST_Database{[Name="TEST",Kind="Schema"]}[Data],\n TESTTABLE_Table = TEST_Schema{[Name="TESTTABLE",Kind="Table"]}[Data]\nin\n TESTTABLE_Table',
@@ -21,7 +21,7 @@ from datahub.ingestion.source.powerbi.rest_api_wrapper.data_classes import (
 )
 from tests.test_helpers import mce_helpers
 
-pytestmark = pytest.mark.slow
+pytestmark = pytest.mark.integration_batch_2
 FROZEN_TIME = "2022-02-03 07:00:00"
 
 
@@ -30,6 +30,8 @@ from datahub.ingestion.source.snowflake.snowflake_report import SnowflakeV2Repor
 from tests.integration.snowflake.common import FROZEN_TIME, default_query_results
 from tests.test_helpers import mce_helpers
 
+pytestmark = pytest.mark.integration_batch_2
+
 
 def random_email():
     return (
@@ -55,7 +57,6 @@ def random_cloud_region():
     )
 
 
-@pytest.mark.integration
 def test_snowflake_basic(pytestconfig, tmp_path, mock_time, mock_datahub_graph):
     test_resources_dir = pytestconfig.rootpath / "tests/integration/snowflake"
 
@@ -183,7 +184,6 @@ def test_snowflake_basic(pytestconfig, tmp_path, mock_time, mock_datahub_graph):
 
 
 @freeze_time(FROZEN_TIME)
-@pytest.mark.integration
 def test_snowflake_private_link(pytestconfig, tmp_path, mock_time, mock_datahub_graph):
     test_resources_dir = pytestconfig.rootpath / "tests/integration/snowflake"
 
@@ -757,7 +757,7 @@ def test_tableau_no_verify():
 
 
 @freeze_time(FROZEN_TIME)
-@pytest.mark.slow
+@pytest.mark.integration_batch_2
 def test_tableau_signout_timeout(pytestconfig, tmp_path, mock_datahub_graph):
     enable_logging()
     output_file_name: str = "tableau_signout_timeout_mces.json"
@@ -13,6 +13,8 @@ from datahub.ingestion.source.sql.trino import TrinoConfig
 from tests.test_helpers import fs_helpers, mce_helpers
 from tests.test_helpers.docker_helpers import wait_for_port
 
+pytestmark = pytest.mark.integration_batch_1
+
 FROZEN_TIME = "2021-09-23 12:00:00"
 
 data_platform = "trino"
@@ -51,7 +53,6 @@ def loaded_trino(trino_runner):
 
 
 @freeze_time(FROZEN_TIME)
-@pytest.mark.integration
 @pytest.mark.xfail
 def test_trino_ingest(
     loaded_trino, test_resources_dir, pytestconfig, tmp_path, mock_time
@@ -111,7 +112,6 @@ def test_trino_ingest(
 
 
 @freeze_time(FROZEN_TIME)
-@pytest.mark.integration
 def test_trino_hive_ingest(
     loaded_trino, test_resources_dir, pytestconfig, tmp_path, mock_time
 ):
@@ -167,7 +167,6 @@ def test_trino_hive_ingest(
 
 
 @freeze_time(FROZEN_TIME)
-@pytest.mark.integration
 def test_trino_instance_ingest(
     loaded_trino, test_resources_dir, pytestconfig, tmp_path, mock_time
 ):
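The test-module hunks above all apply one pattern: tests are assigned to named integration batches, either through a module-level `pytestmark` (which applies the mark to every test in the module) or by swapping a per-test decorator, replacing the older `@pytest.mark.integration` and `@pytest.mark.slow` marks so the suites can be split across the parallel CI cells in the workflow matrix. The repository drives those batches through the gradle tasks in the matrix (`testIntegrationBatch0`, `testIntegrationBatch1`); how each task translates a batch into a marker expression is not shown in this diff. A hypothetical workflow step that selected one batch directly with pytest might look like the following sketch (the step name, flags, and direct pytest invocation are illustrative assumptions, not the repository's actual configuration):

      - name: Run integration batch 2 (illustrative only)
        working-directory: metadata-ingestion
        # pytest's -m flag keeps only tests carrying the given mark; a module-level
        # `pytestmark` attaches that mark to every test in the module.
        run: pytest -m 'integration_batch_2' -vv tests/integration

Custom mark names like these normally also need to be registered in the project's pytest configuration so pytest does not warn about unknown marks; any such registration is outside the hunks shown here.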