Mirror of https://github.com/datahub-project/datahub.git, synced 2025-11-08 07:23:34 +00:00
fix(sdk): make Filter type parsing more permissive (#14212)
parent 94faca81cc
commit 9490eba8dc
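
For context, the fix relies on a standard pydantic v2 pattern: a BeforeValidator attached via Annotated metadata that decodes JSON strings before normal validation runs, so the same type accepts both a dict and its JSON-encoded form. Below is a minimal, self-contained sketch of that pattern; PlatformFilter and LenientFilter are illustrative stand-ins, not names from the DataHub codebase.

import json
from typing import Annotated, Any

import pydantic


def parse_json_from_string(value: Any) -> Any:
    # Decode JSON strings up front; on failure, pass the original value through
    # so pydantic reports a normal validation error rather than a JSON error.
    if isinstance(value, str):
        try:
            return json.loads(value)
        except json.JSONDecodeError:
            return value
    return value


class PlatformFilter(pydantic.BaseModel):
    # Hypothetical stand-in for one member of the SDK's Filter union.
    platform: list[str]


# Hypothetical alias; the SDK wraps its discriminated Filter union the same way.
LenientFilter = Annotated[
    PlatformFilter, pydantic.BeforeValidator(parse_json_from_string)
]

adapter = pydantic.TypeAdapter(LenientFilter)
assert adapter.validate_python({"platform": ["snowflake"]}) == adapter.validate_python(
    '{"platform": ["snowflake"]}'
)
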
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import abc
+import json
 from typing import (
     TYPE_CHECKING,
     Annotated,
@@ -406,26 +407,45 @@ if TYPE_CHECKING or not PYDANTIC_SUPPORTS_CALLABLE_DISCRIMINATOR:
 else:
     from pydantic import Discriminator, Tag
 
+    def _parse_json_from_string(value: Any) -> Any:
+        if isinstance(value, str):
+            try:
+                return json.loads(value)
+            except json.JSONDecodeError:
+                return value
+        else:
+            return value
+
     # TODO: Once we're fully on pydantic 2, we can use a RootModel here.
     # That way we'd be able to attach methods to the Filter type.
     # e.g. replace load_filters(...) with Filter.load(...)
     Filter = Annotated[
-        Union[
-            Annotated[_And, Tag(_And._field_discriminator())],
-            Annotated[_Or, Tag(_Or._field_discriminator())],
-            Annotated[_Not, Tag(_Not._field_discriminator())],
-            Annotated[_EntityTypeFilter, Tag(_EntityTypeFilter._field_discriminator())],
-            Annotated[
-                _EntitySubtypeFilter, Tag(_EntitySubtypeFilter._field_discriminator())
-            ],
-            Annotated[_StatusFilter, Tag(_StatusFilter._field_discriminator())],
-            Annotated[_PlatformFilter, Tag(_PlatformFilter._field_discriminator())],
-            Annotated[_DomainFilter, Tag(_DomainFilter._field_discriminator())],
-            Annotated[_ContainerFilter, Tag(_ContainerFilter._field_discriminator())],
-            Annotated[_EnvFilter, Tag(_EnvFilter._field_discriminator())],
-            Annotated[_CustomCondition, Tag(_CustomCondition._field_discriminator())],
-        ],
-        Discriminator(_filter_discriminator),
+        Annotated[
+            Union[
+                Annotated[_And, Tag(_And._field_discriminator())],
+                Annotated[_Or, Tag(_Or._field_discriminator())],
+                Annotated[_Not, Tag(_Not._field_discriminator())],
+                Annotated[
+                    _EntityTypeFilter, Tag(_EntityTypeFilter._field_discriminator())
+                ],
+                Annotated[
+                    _EntitySubtypeFilter,
+                    Tag(_EntitySubtypeFilter._field_discriminator()),
+                ],
+                Annotated[_StatusFilter, Tag(_StatusFilter._field_discriminator())],
+                Annotated[_PlatformFilter, Tag(_PlatformFilter._field_discriminator())],
+                Annotated[_DomainFilter, Tag(_DomainFilter._field_discriminator())],
+                Annotated[
+                    _ContainerFilter, Tag(_ContainerFilter._field_discriminator())
+                ],
+                Annotated[_EnvFilter, Tag(_EnvFilter._field_discriminator())],
+                Annotated[
+                    _CustomCondition, Tag(_CustomCondition._field_discriminator())
+                ],
+            ],
+            Discriminator(_filter_discriminator),
+        ],
+        pydantic.BeforeValidator(_parse_json_from_string),
     ]
 
     # Required to resolve forward references to "Filter"
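
Net effect of the hunk above: the discriminated union keeps its Discriminator, and the extra Annotated layer adds a BeforeValidator that decodes JSON strings before the discriminator runs; dict inputs pass through unchanged, and because a JSONDecodeError falls back to the original value, malformed JSON surfaces as an ordinary discriminator ValidationError rather than a JSON parsing error. A hedged usage sketch, assuming load_filters and a FilterDsl alias F are importable from datahub.sdk.search_filters (the import path is not shown in this diff):

from datahub.sdk.search_filters import FilterDsl as F, load_filters

# A dict behaves exactly as before.
f_from_dict = load_filters({"platform": ["snowflake"]})
# A JSON-encoded string is now decoded first, then validated the same way.
f_from_str = load_filters('{"platform": ["snowflake"]}')
assert f_from_dict == f_from_str == F.platform("snowflake")
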
@@ -334,6 +334,22 @@ def test_tagged_union_error_messages() -> None:
     ):
         load_filters({"and": [{"unknown_field": 6}]})
 
+    # Test that we can load a filter from a string.
+    # Sometimes we get filters encoded as JSON, and we want to handle those gracefully.
+    filter_str = '{\n "and": [\n {"entity_type": ["dataset"]},\n {"entity_subtype": ["Table"]},\n {"platform": ["snowflake"]}\n ]\n}'
+    assert load_filters(filter_str) == F.and_(
+        F.entity_type("dataset"),
+        F.entity_subtype("Table"),
+        F.platform("snowflake"),
+    )
+    with pytest.raises(
+        ValidationError,
+        match=re.compile(
+            r"1 validation error.+Unable to extract tag using discriminator", re.DOTALL
+        ),
+    ):
+        load_filters("this is invalid json but should not raise a json error")
+
 
 def test_invalid_filter() -> None:
     with pytest.raises(InvalidUrnError):