Mirror of https://github.com/datahub-project/datahub.git, synced 2025-12-28 02:17:53 +00:00
fix(cli): avoid click 8.2.0 due to bugs (#13518)

parent a00e65cd2f
commit 1c7836dce8
@@ -28,7 +28,8 @@ base_requirements = {
 }
 
 framework_common = {
-    "click>=7.1.2",
+    # Avoiding click 8.2.0 due to https://github.com/pallets/click/issues/2894
+    "click>=7.1.2, !=8.2.0",
     "click-default-group",
     "PyYAML",
     "toml>=0.10.0",
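The exclusion specifier skips only the one broken release while leaving every other click version eligible. A minimal sketch (not part of the commit) that checks the new pin with the packaging library:

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    spec = SpecifierSet(">=7.1.2, !=8.2.0")  # same constraint as the new pin
    assert Version("8.1.8") in spec          # earlier releases still allowed
    assert Version("8.2.0") not in spec      # the buggy release is excluded
    assert Version("8.2.1") in spec          # later fixes are allowed again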
@@ -377,7 +377,8 @@ def _maybe_print_upgrade_message(
                 + click.style(
                     f"➡️ Upgrade via \"pip install 'acryl-datahub=={version_stats.server.current.version}'\"",
                     fg="cyan",
-                )
+                ),
+                err=True,
             )
     elif client_server_compat == 0 and encourage_cli_upgrade:
         with contextlib.suppress(Exception):
@@ -387,7 +388,8 @@ def _maybe_print_upgrade_message(
                 + click.style(
                     f"You seem to be running an old version of datahub cli: {current_version} {get_days(current_release_date)}. Latest version is {latest_version} {get_days(latest_release_date)}.\nUpgrade via \"pip install -U 'acryl-datahub'\"",
                     fg="cyan",
-                )
+                ),
+                err=True,
             )
     elif encourage_quickstart_upgrade:
         try:
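click's err=True flag routes echo output to stderr, which keeps advisory upgrade hints out of stdout that scripted callers may parse. A minimal standalone sketch (not DataHub code) of what the flag changes:

    import click

    click.echo("normal output")  # written to stdout
    click.echo(click.style("upgrade hint", fg="cyan"), err=True)  # written to stderr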
@@ -4,3 +4,10 @@ markers =
     ; no_cypress_suite0: main smoke tests; expressed as the negative of the others
     no_cypress_suite1: main smoke tests, suite 1
     test_run_cypress: run cypress tests
+
+filterwarnings =
+    # Ignore some warnings that come from dependencies.
+    ignore::datahub.configuration.pydantic_migration_helpers.PydanticDeprecatedSince20
+    ignore:The new datahub SDK:datahub.errors.ExperimentalWarning
+    # We should not be unexpectedly seeing API tracing warnings.
+    error::datahub.errors.APITracingWarning
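Each filterwarnings entry follows the stdlib warnings filter syntax (action:message:category); the error:: action escalates a warning class into a test failure. A minimal standalone sketch of the same mechanism, where the warning class is a stand-in for the real datahub.errors.APITracingWarning:

    import warnings

    class APITracingWarning(Warning):
        """Stand-in for datahub.errors.APITracingWarning."""

    # Equivalent of the ini line "error::datahub.errors.APITracingWarning".
    warnings.filterwarnings("error", category=APITracingWarning)

    try:
        warnings.warn("unexpected API tracing", APITracingWarning)
    except APITracingWarning as exc:
        print(f"warning escalated to an error: {exc}")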
@@ -7,14 +7,12 @@ from random import randint
 
 import pytest
 import yaml
-from click.testing import CliRunner
 
 from datahub.api.entities.dataset.dataset import Dataset
 from datahub.emitter.mce_builder import make_dataset_urn
-from datahub.entrypoints import datahub
 from datahub.ingestion.graph.client import DataHubGraph
 from tests.consistency_utils import wait_for_writes_to_sync
-from tests.utils import delete_urns, get_sleep_info
+from tests.utils import delete_urns, get_sleep_info, run_datahub_cmd
 
 logger = logging.getLogger(__name__)
@@ -24,7 +22,6 @@ dataset_id = f"test_dataset_sync_{start_index}"
 dataset_urn = make_dataset_urn("snowflake", dataset_id)
 
 sleep_sec, sleep_times = get_sleep_info()
-runner = CliRunner()
 
 
 @pytest.fixture(scope="module")
@@ -76,8 +73,7 @@ def create_dataset_yaml(file_path: Path, additional_properties=None):
 def run_cli_command(cmd, auth_session):
     """Run a DataHub CLI command using CliRunner and auth_session"""
     args = cmd.split()
-    result = runner.invoke(
-        datahub,
+    result = run_datahub_cmd(
         args,
         env={
             "DATAHUB_GMS_URL": auth_session.gms_url(),
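This file sets the pattern repeated across the remaining smoke tests: each call site that previously built its own CliRunner and invoked the datahub entrypoint now delegates to the shared run_datahub_cmd helper (defined in tests/utils.py, the last file in this diff), so the click-version workaround lives in one place. A hedged sketch of the before/after shape of such a call site:

    # Before: each test module owned a CliRunner and the entrypoint.
    # result = runner.invoke(datahub, args, env={...})

    # After: the shared helper hides the runner construction.
    result = run_datahub_cmd(args, env={"DATAHUB_GMS_URL": auth_session.gms_url()})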
@@ -5,14 +5,15 @@ import tempfile
 from json import JSONDecodeError
 from typing import Any, Dict, List, Optional
 
-from click.testing import CliRunner, Result
-
 import datahub.emitter.mce_builder as builder
 from datahub.emitter.serialization_helper import pre_json_transform
-from datahub.entrypoints import datahub
 from datahub.metadata.schema_classes import DatasetProfileClass
 from tests.aspect_generators.timeseries.dataset_profile_gen import gen_dataset_profiles
-from tests.utils import get_strftime_from_timestamp_millis, wait_for_writes_to_sync
+from tests.utils import (
+    get_strftime_from_timestamp_millis,
+    run_datahub_cmd,
+    wait_for_writes_to_sync,
+)
 
 logger = logging.getLogger(__name__)
@@ -24,8 +25,6 @@ test_dataset_urn: str = builder.make_dataset_urn_with_platform_instance(
     "TEST",
 )
 
-runner = CliRunner()
-
 
 def sync_elastic() -> None:
     wait_for_writes_to_sync()
@@ -45,8 +44,7 @@ def datahub_put_profile(auth_session, dataset_profile: DatasetProfileClass) -> N
         "-d",
         aspect_file.name,
     ]
-    put_result = runner.invoke(
-        datahub,
+    put_result = run_datahub_cmd(
         put_args,
         env={
             "DATAHUB_GMS_URL": auth_session.gms_url(),
@@ -63,8 +61,7 @@ def datahub_get_and_verify_profile(
     # Wait for writes to stabilize in elastic
     sync_elastic()
     get_args: List[str] = ["get", "--urn", test_dataset_urn, "-a", test_aspect_name]
-    get_result: Result = runner.invoke(
-        datahub,
+    get_result = run_datahub_cmd(
         get_args,
         env={
             "DATAHUB_GMS_URL": auth_session.gms_url(),
@@ -98,8 +95,7 @@ def datahub_delete(auth_session, params: List[str]) -> None:
     args.extend(params)
     args.append("--hard")
     logger.info(f"Running delete command with args: {args}")
-    delete_result: Result = runner.invoke(
-        datahub,
+    delete_result = run_datahub_cmd(
         args,
         input="y\ny\n",
         env={
@@ -1,15 +1,10 @@
 import json
 from typing import Dict, List, Optional
 
-from click.testing import CliRunner, Result
-
 import datahub.emitter.mce_builder as builder
 from datahub.emitter.serialization_helper import post_json_transform
-from datahub.entrypoints import datahub
 from datahub.metadata.schema_classes import DatasetProfileClass
-from tests.utils import ingest_file_via_rest, wait_for_writes_to_sync
-
-runner = CliRunner()
+from tests.utils import ingest_file_via_rest, run_datahub_cmd, wait_for_writes_to_sync
 
 
 def sync_elastic() -> None:
@@ -19,8 +14,7 @@ def sync_elastic() -> None:
 def datahub_rollback(auth_session, run_id: str) -> None:
     sync_elastic()
     rollback_args: List[str] = ["ingest", "rollback", "--run-id", run_id, "-f"]
-    rollback_result: Result = runner.invoke(
-        datahub,
+    rollback_result = run_datahub_cmd(
         rollback_args,
         env={
             "DATAHUB_GMS_URL": auth_session.gms_url(),
@@ -39,8 +33,7 @@ def datahub_get_and_verify_profile(
     # Wait for writes to stabilize in elastic
     sync_elastic()
     get_args: List[str] = ["get", "--urn", urn, "-a", aspect_name]
-    get_result: Result = runner.invoke(
-        datahub,
+    get_result = run_datahub_cmd(
         get_args,
         env={
             "DATAHUB_GMS_URL": auth_session.gms_url(),
@@ -4,14 +4,10 @@ import tempfile
 from typing import Any, Dict, Iterable, List
 
 import yaml
-from click.testing import CliRunner, Result
 
 from datahub.api.entities.corpgroup.corpgroup import CorpGroup
-from datahub.entrypoints import datahub
 from datahub.ingestion.graph.client import DataHubGraph
-from tests.utils import wait_for_writes_to_sync
-
-runner = CliRunner()
+from tests.utils import run_datahub_cmd, wait_for_writes_to_sync
 
 
 def sync_elastic() -> None:
@@ -28,8 +24,7 @@ def datahub_upsert_group(auth_session: Any, group: CorpGroup) -> None:
         "-f",
         group_file.name,
     ]
-    group_create_result = runner.invoke(
-        datahub,
+    group_create_result = run_datahub_cmd(
         upsert_args,
         env={
             "DATAHUB_GMS_URL": auth_session.gms_url(),
@@ -56,8 +51,7 @@ def gen_datahub_groups(num_groups: int) -> Iterable[CorpGroup]:
 
 def datahub_get_group(auth_session, group_urn: str):
     get_args: List[str] = ["get", "--urn", group_urn]
-    get_result: Result = runner.invoke(
-        datahub,
+    get_result = run_datahub_cmd(
         get_args,
         env={
             "DATAHUB_GMS_URL": auth_session.gms_url(),
@@ -5,13 +5,10 @@ import time
 from typing import Any, Dict, Iterable, List
 
 import yaml
-from click.testing import CliRunner, Result
 
 from datahub.api.entities.corpuser.corpuser import CorpUser
-from datahub.entrypoints import datahub
 from tests.consistency_utils import wait_for_writes_to_sync
-
-runner = CliRunner()
+from tests.utils import run_datahub_cmd
 
 
 def datahub_upsert_user(auth_session, user: CorpUser) -> None:
@@ -24,8 +21,7 @@ def datahub_upsert_user(auth_session, user: CorpUser) -> None:
         "-f",
         user_file.name,
     ]
-    user_create_result = runner.invoke(
-        datahub,
+    user_create_result = run_datahub_cmd(
         upsert_args,
         env={
             "DATAHUB_GMS_URL": auth_session.gms_url(),
@@ -55,8 +51,7 @@ def gen_datahub_users(num_users: int) -> Iterable[CorpUser]:
 
 def datahub_get_user(auth_session: Any, user_urn: str):
     get_args: List[str] = ["get", "--urn", user_urn]
-    get_result: Result = runner.invoke(
-        datahub,
+    get_result = run_datahub_cmd(
         get_args,
         env={
             "DATAHUB_GMS_URL": auth_session.gms_url(),
@@ -2,14 +2,18 @@ import json
 import logging
 import os
 from datetime import datetime, timedelta, timezone
-from typing import Any, Dict, List, Tuple
+from typing import Any, Dict, List, Optional, Tuple
 
+import click
+import click.testing
 import requests
 import tenacity
 from joblib import Parallel, delayed
+from packaging import version
 from requests.structures import CaseInsensitiveDict
 
 from datahub.cli import cli_utils, env_utils
+from datahub.entrypoints import datahub
 from datahub.ingestion.run.pipeline import Pipeline
 from tests.consistency_utils import wait_for_writes_to_sync
@@ -99,6 +103,22 @@ def check_endpoint(auth_session, url):
         raise SystemExit(f"{url}: is Not reachable \nErr: {e}")
 
 
+def run_datahub_cmd(
+    command: List[str],
+    *,
+    input: Optional[str] = None,
+    env: Optional[Dict[str, str]] = None,
+) -> click.testing.Result:
+    # TODO: Unify this with the run_datahub_cmd in the metadata-ingestion directory.
+    click_version: str = click.__version__  # type: ignore
+    if version.parse(click_version) >= version.parse("8.2.0"):
+        runner = click.testing.CliRunner()
+    else:
+        # Once we're pinned to click >= 8.2.0, we can remove this.
+        runner = click.testing.CliRunner(mix_stderr=False)  # type: ignore
+    return runner.invoke(datahub, command, input=input, env=env)
+
+
 def ingest_file_via_rest(
     auth_session, filename: str, mode: str = "ASYNC_BATCH"
 ) -> Pipeline:
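Why the helper branches on the version: click 8.2.0 removed CliRunner's mix_stderr parameter and always captures stderr separately, so passing mix_stderr=False to a new click raises a TypeError, while omitting it on an older click would merge the two streams. A hedged sketch of calling the helper from a test (the URN and URL here are illustrative, not from the commit):

    result = run_datahub_cmd(
        ["get", "--urn", "urn:li:corpuser:datahub"],
        env={"DATAHUB_GMS_URL": "http://localhost:8080"},
    )
    assert result.exit_code == 0  # non-zero means the CLI command failed
    print(result.output)          # captured stdout of the datahub CLI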