Mirror of https://github.com/datahub-project/datahub.git, synced 2025-11-02 11:49:23 +00:00

feat(python): upgrade mypy to 1.17.1 (#14380)

Co-authored-by: Cursor Agent <cursoragent@cursor.com>

parent: ed3cfc5d42
commit: 2e18706f78

@@ -41,7 +41,7 @@ lint_requirements = {
     # This is pinned only to avoid spurious errors in CI.
     # We should make an effort to keep it up to date.
     "ruff==0.11.7",
-    "mypy==1.14.1",
+    "mypy==1.17.1",
 }

 base_requirements = {

@@ -72,7 +72,7 @@ dev_requirements = {
     *base_requirements,
     *mypy_stubs,
     "coverage>=5.1",
-    "mypy==1.14.1",
+    "mypy==1.17.1",
     "ruff==0.11.7",
     "pytest>=6.2.2",
     "pytest-cov>=2.8.1",

@@ -55,7 +55,7 @@ base_dev_requirements = {
     "dagster-snowflake-pandas >= 0.11.0",
     "coverage>=5.1",
     "ruff==0.11.7",
-    "mypy==1.14.1",
+    "mypy==1.17.1",
     # pydantic 1.8.2 is incompatible with mypy 0.910.
     # See https://github.com/samuelcolvin/pydantic/pull/3175#issuecomment-995382910.
     "pydantic>=1.10.0,!=1.10.3",

@@ -62,7 +62,7 @@ base_dev_requirements = {
     *mypy_stubs,
     "coverage>=5.1",
     "ruff==0.11.7",
-    "mypy==1.14.1",
+    "mypy==1.17.1",
     "pytest>=6.2.2",
     "pytest-asyncio>=0.16.0",
     "pytest-cov>=2.8.1",

@@ -27,6 +27,8 @@ base_requirements = {
     # Actual dependencies.
     # Temporary pinning to 2.0.0 until we can upgrade to 3.0.0
     "prefect >= 2.0.0,<3.0.0",
+    # Pin asyncpg to a version compatible with Python 3.13
+    "asyncpg>=0.30.0",
     *rest_common,
     f"acryl-datahub[datahub-rest]{_self_pin}",
 }

@@ -55,7 +57,7 @@ dev_requirements = {
     *mypy_stubs,
     "coverage>=5.1",
     "ruff==0.11.7",
-    "mypy==1.14.1",
+    "mypy==1.17.1",
     # pydantic 1.8.2 is incompatible with mypy 0.910.
     # See https://github.com/samuelcolvin/pydantic/pull/3175#issuecomment-995382910.
     "pydantic>=1.10",

@@ -645,7 +645,7 @@ lint_requirements = {
     # This is pinned only to avoid spurious errors in CI.
     # We should make an effort to keep it up to date.
     "ruff==0.11.7",
-    "mypy==1.14.1",
+    "mypy==1.17.1",
 }

 base_dev_requirements = {

@@ -455,7 +455,11 @@ class PathSpec(ConfigModel):
             partition = partition.rsplit("/", 1)[0]
             for partition_key in partition.split("/"):
                 if partition_key.find("=") != -1:
-                    partition_keys.append(tuple(partition_key.split("=")))
+                    key_value = partition_key.split(
+                        "=", 1
+                    )  # Split into at most 2 parts
+                    if len(key_value) == 2:
+                        partition_keys.append((key_value[0], key_value[1]))
         else:
             partition_split = partition.rsplit("/", 1)
             if len(partition_split) == 1:

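Illustration (not part of the commit): a minimal, standalone sketch of why the new code splits on the first "=" only. The partition segment below is a made-up example whose value itself contains an equals sign.

# Hypothetical partition path segment whose value contains "=":
partition_key = "filter=status=active"

# Old behavior: splitting on every "=" yields a 3-tuple, not a (key, value) pair.
print(tuple(partition_key.split("=")))  # ('filter', 'status', 'active')

# New behavior: split at most once, then keep only well-formed pairs.
key_value = partition_key.split("=", 1)
if len(key_value) == 2:
    print((key_value[0], key_value[1]))  # ('filter', 'status=active')
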
@@ -69,9 +69,10 @@ class FivetranLogAPI:
             fivetran_log_query.set_schema(bigquery_destination_config.dataset)

             # The "database" should be the BigQuery project name.
-            fivetran_log_database = engine.execute(
-                "SELECT @@project_id"
-            ).fetchone()[0]
+            result = engine.execute("SELECT @@project_id").fetchone()
+            if result is None:
+                raise ValueError("Failed to retrieve BigQuery project ID")
+            fivetran_log_database = result[0]
         else:
             raise ConfigurationError(
                 f"Destination platform '{destination_platform}' is not yet supported."

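Illustration (not part of the commit): SQLAlchemy's fetchone() is typed as returning an optional row, so indexing it directly is rejected by newer mypy releases. A minimal sketch of the guard, using a plain tuple and a hypothetical helper name to stand in for the query result:

from typing import Optional, Tuple

def _project_id_from_row(row: Optional[Tuple[str]]) -> str:
    # Mirrors the hunk above: fail loudly instead of indexing a possibly-None row.
    if row is None:
        raise ValueError("Failed to retrieve BigQuery project ID")
    return row[0]

print(_project_id_from_row(("my-gcp-project",)))  # my-gcp-project
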
@@ -1184,7 +1184,7 @@ class TableauSiteSource:
                 self.report.warning(
                     title="Incomplete project hierarchy",
                     message="Project details missing. Child projects will be ingested without reference to their parent project. We generally need Site Administrator Explorer permissions to extract the complete project hierarchy.",
-                    context=f"Missing {project.parent_id}, referenced by {project.id} {project.project_name}",
+                    context=f"Missing {project.parent_id}, referenced by {project.id} {project.name}",
                 )
                 project.parent_id = None

@@ -281,11 +281,14 @@ class BaseTransformer(Transformer, metaclass=ABCMeta):
                             )
                         )

-                    record_metadata = _update_work_unit_id(
-                        envelope=envelope,
-                        aspect_name=mcp.aspect.get_aspect_name(),  # type: ignore
-                        urn=mcp.entityUrn,
-                    )
+                    if mcp.entityUrn:
+                        record_metadata = _update_work_unit_id(
+                            envelope=envelope,
+                            aspect_name=mcp.aspect.get_aspect_name(),  # type: ignore
+                            urn=mcp.entityUrn,
+                        )
+                    else:
+                        record_metadata = envelope.metadata.copy()

                     yield RecordEnvelope(
                         record=mcp,

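Illustration (not part of the commit): the added `if mcp.entityUrn:` branch reads as an Optional-narrowing fix, since entityUrn may be None while the helper presumably expects a concrete urn. A standalone sketch of that pattern with stand-in names:

from typing import Optional

def _work_unit_id_for(urn: str) -> str:
    # Stand-in for _update_work_unit_id: requires a non-None urn.
    return f"{urn}::workunit"

def record_metadata_id(entity_urn: Optional[str], fallback: str) -> str:
    # Narrow the Optional before calling the str-only helper; otherwise fall back,
    # as the hunk above does with envelope.metadata.copy().
    if entity_urn:
        return _work_unit_id_for(entity_urn)
    return fallback

print(record_metadata_id("urn:li:dataset:example", "fallback"))  # urn:li:dataset:example::workunit
print(record_metadata_id(None, "fallback"))                      # fallback
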
@@ -242,7 +242,8 @@ class RestServiceConfig:

         # Check if this is a config-based feature
         if feature in config_based_features:
-            return config_based_features[feature]()
+            result = config_based_features[feature]()
+            return bool(result) if result is not None else False

         # For environment-based features, determine requirements based on cloud vs. non-cloud
         deployment_type = "cloud" if self.is_datahub_cloud else "core"
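
Illustration (not part of the commit): the new return statement coerces the config lookup to a strict bool, so a declared bool return type holds even when a feature callable yields None or a non-bool value. A standalone sketch with hypothetical feature names:

from typing import Callable, Dict, Optional

# Hypothetical config-based feature lookups; values may be None or non-bool.
config_based_features: Dict[str, Callable[[], Optional[object]]] = {
    "impact-analysis": lambda: None,
    "patch-capable": lambda: "enabled",
}

def is_feature_enabled(feature: str) -> bool:
    result = config_based_features[feature]()
    # Coerce to a strict bool so callers (and mypy) always see bool, never Optional.
    return bool(result) if result is not None else False

print(is_feature_enabled("impact-analysis"))  # False
print(is_feature_enabled("patch-capable"))    # True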