MINOR - Pydantic V2 warnings and better exception msg (#16916)
parent 5a7b0b48c2, commit 7e98ece3e5
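This commit moves the ingestion code off the deprecated Pydantic V1 API surface (silencing the V2 deprecation warnings) and rewords the Flyway migration errors to point at the current `./bootstrap/openmetadata-ops.sh migrate` command. The hunks below repeat a small set of mechanical renames; as a reference sketch (names follow the Pydantic migration guide):

```python
# Pydantic V1 -> V2 renames applied throughout this diff:
#   model.copy(...)        -> model.model_copy(...)
#   model.dict(...)        -> model.model_dump(...)
#   model.json(...)        -> model.model_dump_json(...)
#   Model.parse_raw(raw)   -> Model.model_validate_json(raw)
#   Model.__fields__       -> Model.model_fields
#   class Config: ...      -> model_config = ConfigDict(...)
#   Extra.allow / "forbid" -> ConfigDict(extra="allow") / ConfigDict(extra="forbid")
```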
@@ -11,8 +11,6 @@
 # limitations under the License.
 
 echo "Initializing OpenMetadata Server...";
-# echo "Migrating the database to the latest version and the indexes in ElasticSearch...";
-# ./bootstrap/bootstrap_storage.sh migrate-all
 echo " ||||||| "
 echo " ||||   |||| ____ "
 echo " ||||   |||| / __ \ "
@@ -200,7 +200,7 @@ class AirflowLineageRunner:
         return self.metadata.patch(
             entity=Pipeline,
             source=pipeline,
-            destination=pipeline.copy(update=pipeline_request.__dict__),
+            destination=pipeline.model_copy(update=pipeline_request.__dict__),
             allowed_fields=ALLOWED_COMMON_PATCH_FIELDS,
             restrict_update_fields=RESTRICT_UPDATE_LIST,
         )
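The `.copy()` to `.model_copy()` rename above recurs throughout the diff. A minimal self-contained sketch of the V2 call (the `Pipeline` model here is a stand-in, not the OpenMetadata entity):

```python
from typing import Optional

from pydantic import BaseModel


class Pipeline(BaseModel):  # stand-in model for illustration
    name: str
    description: Optional[str] = None


original = Pipeline(name="etl", description="nightly load")
# V1's .copy() still works in V2 but emits a DeprecationWarning;
# model_copy(update=...) returns a new instance with the given fields replaced.
patched = original.model_copy(update={"description": "hourly load"})
assert patched.description == "hourly load"
assert original.description == "nightly load"
```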
@@ -17,7 +17,7 @@ import traceback
 from dataclasses import dataclass
 from typing import List, Optional, Union
 
-from pydantic import BaseModel, Extra
+from pydantic import BaseModel, ConfigDict
 from pydomo import Domo
 
 from metadata.generated.schema.entity.services.connections.dashboard.domoDashboardConnection import (
@@ -43,8 +43,7 @@ class DomoBaseModel(BaseModel):
     Domo basic configurations
     """
 
-    class Config:
-        extra = Extra.allow
+    model_config = ConfigDict(extra="allow")
 
     id: str
     name: str
@@ -75,8 +74,7 @@ class DomoChartMetadataDetails(BaseModel):
     Metadata Details in chart
     """
 
-    class Config:
-        extra = Extra.allow
+    model_config = ConfigDict(extra="allow")
 
     chartType: Optional[str] = None
 
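Pydantic V2 replaces the nested `class Config` with a `model_config` attribute built from `ConfigDict`, and string literals replace the `Extra` enum. A self-contained sketch of the pattern (the model name is illustrative):

```python
from pydantic import BaseModel, ConfigDict


class DomoLikeModel(BaseModel):  # illustrative name
    # V1 equivalent:
    #   class Config:
    #       extra = Extra.allow
    model_config = ConfigDict(extra="allow")

    id: str
    name: str


# extra="allow" keeps unknown keys instead of rejecting them
m = DomoLikeModel(id="1", name="sales", owner="jane")
assert m.model_extra == {"owner": "jane"}
```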
@@ -19,14 +19,13 @@ from abc import ABC, abstractmethod
 from typing import IO, Any, Optional
 
 import yaml
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
 
 
 class ConfigModel(BaseModel):
     """Class definition for config model"""
 
-    class Config:
-        extra = "forbid"
+    model_config = ConfigDict(extra="forbid")
 
 
 class DynamicTypedConfig(ConfigModel):
@@ -17,7 +17,7 @@ from datetime import datetime, timezone
 from types import MappingProxyType
 from typing import Dict, Iterable, Optional, Union, cast
 
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
 
 from metadata.data_insight.processor.reports.cost_analysis_report_data_processor import (
     AggregatedCostAnalysisReportDataProcessor,
@@ -48,9 +48,7 @@ logger = profiler_logger()
 class DataInsightRecord(BaseModel):
     """Return class for the OpenMetadata Profiler Source"""
 
-    class Config:
-        arbitrary_types_allowed = True
-        extra = "forbid"
+    model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=True)
 
     data: ReportData
 
@@ -175,7 +175,7 @@ class SQATestSuiteInterface(SQAInterfaceMixin, TestSuiteInterface):
         runtime_params = setter.get_parameters(test_case)
         test_case.parameterValues.append(
             TestCaseParameterValue(
-                name="runtimeParams", value=runtime_params.json()
+                name="runtimeParams", value=runtime_params.model_dump_json()
             )
         )
 
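Serialization follows the same rename scheme: `.json()` becomes `.model_dump_json()`. A minimal sketch (`RuntimeParams` here is a placeholder model, not the real test-suite class):

```python
from typing import Optional

from pydantic import BaseModel


class RuntimeParams(BaseModel):  # placeholder for illustration
    table: str
    threshold: Optional[float] = None


params = RuntimeParams(table="dim_customer", threshold=0.1)
# .json() is deprecated in V2; model_dump_json() produces the same
# compact JSON string and is the supported replacement.
payload = params.model_dump_json()
```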
@@ -243,7 +243,7 @@ class TableDiffValidator(BaseTestValidator, SQAValidatorMixin):
         raw = self.get_test_case_param_value(
             self.test_case.parameterValues, "runtimeParams", str
         )
-        runtime_params = TableDiffRuntimeParameters.parse_raw(raw)
+        runtime_params = TableDiffRuntimeParameters.model_validate_json(raw)
         return runtime_params
 
     def get_row_diff_test_case_result(
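On the parsing side, `parse_raw()` becomes `model_validate_json()`, giving a lossless round-trip with `model_dump_json()`. A sketch with a hypothetical model:

```python
from pydantic import BaseModel


class DiffParams(BaseModel):  # hypothetical model for illustration
    table1: str
    table2: str


raw = '{"table1": "orders_a", "table2": "orders_b"}'
# parse_raw() is deprecated in V2; model_validate_json() validates a JSON string
params = DiffParams.model_validate_json(raw)
# round-trip: serialize and parse back to an equal instance
assert params == DiffParams.model_validate_json(params.model_dump_json())
```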
@@ -13,7 +13,7 @@ Common definitions for configuration management
 """
 from typing import Any, Optional, TypeVar
 
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
 
 from metadata.utils.logger import ingestion_logger
 
@@ -27,8 +27,7 @@ Entity = TypeVar("Entity", bound=BaseModel)
 
 
 class ConfigModel(BaseModel):
-    class Config:
-        extra = "forbid"
+    model_config = ConfigDict(extra="forbid")
 
 
 class DynamicTypedConfig(ConfigModel):
@@ -294,7 +294,7 @@ def _unsafe_parse_config(config: dict, cls: Type[T], message: str) -> None:
         cls.model_validate(config)
     except ValidationError as err:
         logger.debug(
-            f"The supported properties for {cls.__name__} are {list(cls.__fields__.keys())}"
+            f"The supported properties for {cls.__name__} are {list(cls.model_fields.keys())}"
         )
         raise err
 
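Class-level introspection moves from the dunder `__fields__` to the public `model_fields` mapping. A minimal sketch (the model name is illustrative):

```python
from typing import Optional

from pydantic import BaseModel


class WorkflowConfig(BaseModel):  # illustrative name
    logger_level: str = "INFO"
    raise_on_error: Optional[bool] = None


# __fields__ still resolves in V2 but emits a deprecation warning;
# model_fields maps field names to their FieldInfo on the class itself.
assert list(WorkflowConfig.model_fields.keys()) == ["logger_level", "raise_on_error"]
```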
@@ -315,7 +315,7 @@ def _unsafe_parse_dbt_config(config: dict, cls: Type[T], message: str) -> None:
         cls.model_validate(config)
     except ValidationError as err:
         logger.debug(
-            f"The supported properties for {cls.__name__} are {list(cls.__fields__.keys())}"
+            f"The supported properties for {cls.__name__} are {list(cls.model_fields.keys())}"
         )
         raise err
 
@@ -328,7 +328,7 @@ class TopologyRunnerMixin(Generic[C]):
         """
         return PatchRequest(
             original_entity=original_entity,
-            new_entity=original_entity.copy(update=create_request.__dict__),
+            new_entity=original_entity.model_copy(update=create_request.__dict__),
             override_metadata=self.source_config.overrideMetadata,
         )
 
@@ -440,7 +440,7 @@ def _sort_array_entity_fields(
             destination_attr = destination_dict.get(model_str(source_attr.name))
             if destination_attr:
                 updated_attributes.append(
-                    source_attr.copy(update=destination_attr.__dict__)
+                    source_attr.model_copy(update=destination_attr.__dict__)
                 )
                 # Remove the updated attribute from the destination dictionary
                 del destination_dict[model_str(source_attr.name)]
@@ -296,7 +296,7 @@ class TopologyContextManager:
 
         # If it does not exist yet, copies the Parent Context in order to have all context gathered until this point.
         self.contexts.setdefault(
-            thread_id, self.contexts[parent_thread_id].copy(deep=True)
+            thread_id, self.contexts[parent_thread_id].model_copy(deep=True)
         )
 
 
@@ -199,7 +199,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
             return None
 
         # https://docs.pydantic.dev/latest/usage/exporting_models/#modelcopy
-        destination = source.copy(deep=True)
+        destination = source.model_copy(deep=True)
         destination.description = Markdown(description)
 
         return self.patch(entity=entity, source=source, destination=destination)
@@ -228,7 +228,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
 
         table.tableConstraints = instance.tableConstraints
 
-        destination = table.copy(deep=True)
+        destination = table.model_copy(deep=True)
         destination.tableConstraints = constraints
 
         return self.patch(entity=Table, source=table, destination=destination)
@@ -253,7 +253,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
         if not source:
             return None
 
-        destination = source.copy(deep=True)
+        destination = source.model_copy(deep=True)
 
         destination.entityLink = EntityLink(entity_link)
         if test_case_parameter_values:
@@ -291,7 +291,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
 
         # Initialize empty tag list or the last updated tags
         source.tags = instance.tags or []
-        destination = source.copy(deep=True)
+        destination = source.model_copy(deep=True)
 
         tag_fqns = {label.tagFQN.root for label in tag_labels}
 
@@ -385,7 +385,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
         # Make sure we run the patch against the last updated data from the API
         table.columns = instance.columns
 
-        destination = table.copy(deep=True)
+        destination = table.model_copy(deep=True)
         for column_tag in column_tags or []:
             update_column_tags(destination.columns, column_tag, operation)
 
@@ -472,7 +472,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
         # Make sure we run the patch against the last updated data from the API
         table.columns = instance.columns
 
-        destination = table.copy(deep=True)
+        destination = table.model_copy(deep=True)
         update_column_description(destination.columns, column_descriptions, force)
 
         patched_entity = self.patch(entity=Table, source=table, destination=destination)
@@ -531,7 +531,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
         :param life_cycle_data: Life Cycle data to add
         """
         try:
-            destination = entity.copy(deep=True)
+            destination = entity.model_copy(deep=True)
             destination.lifeCycle = life_cycle
             return self.patch(
                 entity=type(entity), source=entity, destination=destination
@@ -546,7 +546,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
     def patch_domain(self, entity: Entity, domain: Domain) -> Optional[Entity]:
         """Patch domain data for an Entity"""
         try:
-            destination: Entity = entity.copy(deep=True)
+            destination: Entity = entity.model_copy(deep=True)
             destination.domain = EntityReference(id=domain.id, type="domain")
             return self.patch(
                 entity=type(entity), source=entity, destination=destination
@@ -60,7 +60,7 @@ class FileSink(Sink):
         if self.wrote_something:
             self.file.write(",\n")
 
-        self.file.write(record.json())
+        self.file.write(record.model_dump_json())
         self.wrote_something = True
         return Either(right=get_log_name(record))
 
@@ -205,7 +205,7 @@ class DashboardServiceSource(TopologyRunnerMixin, Source, ABC):
     config: WorkflowSource
     metadata: OpenMetadata
     # Big union of types we want to fetch dynamically
-    service_connection: DashboardConnection.__fields__["config"].annotation
+    service_connection: DashboardConnection.model_fields["config"].annotation
 
     topology = DashboardServiceTopology()
     context = TopologyContextManager(topology)
@@ -608,7 +608,7 @@ class DashboardServiceSource(TopologyRunnerMixin, Source, ABC):
         """
         patch_request = PatchRequest(
             original_entity=original_entity,
-            new_entity=original_entity.copy(update=create_request.__dict__),
+            new_entity=original_entity.model_copy(update=create_request.__dict__),
         )
         if isinstance(original_entity, Dashboard):
             # For patch the charts need to be entity ref instead of fqn
@@ -18,7 +18,7 @@ from time import sleep
 from typing import List, Optional, Tuple
 
 import msal
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
 
 from metadata.generated.schema.entity.services.connections.dashboard.powerBIConnection import (
     PowerBIConnection,
@@ -327,8 +327,7 @@ class PowerBiApiClient:
 
 
 class PowerBiClient(BaseModel):
-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(arbitrary_types_allowed=True)
 
     api_client: PowerBiApiClient
     file_client: Optional[PowerBiFileClient]
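`arbitrary_types_allowed=True` is what lets a model hold fields of plain (non-pydantic) classes such as `PowerBiApiClient`; V2 then validates them with a plain `isinstance` check. A self-contained sketch (the class names are illustrative):

```python
from pydantic import BaseModel, ConfigDict


class ApiClient:  # plain class, not a pydantic model (illustrative)
    def __init__(self, base_url: str):
        self.base_url = base_url


class ClientHolder(BaseModel):
    # Without arbitrary_types_allowed, pydantic raises a schema-generation
    # error for annotations it cannot build a validator for; with it, the
    # field is accepted via an isinstance check.
    model_config = ConfigDict(arbitrary_types_allowed=True)

    api_client: ApiClient


holder = ClientHolder(api_client=ApiClient("https://example.com"))
```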
|
@ -15,7 +15,7 @@ Tableau Source Model module
|
|||||||
|
|
||||||
from typing import Any, Dict, List, Optional, Union
|
from typing import Any, Dict, List, Optional, Union
|
||||||
|
|
||||||
from pydantic import BaseModel, Extra, Field, validator
|
from pydantic import BaseModel, ConfigDict, Field, validator
|
||||||
|
|
||||||
from metadata.generated.schema.entity.data.chart import ChartType
|
from metadata.generated.schema.entity.data.chart import ChartType
|
||||||
|
|
||||||
@@ -25,8 +25,7 @@ class TableauBaseModel(BaseModel):
     Tableau basic configurations
     """
 
-    class Config:
-        extra = Extra.allow
+    model_config = ConfigDict(extra="allow")
 
     id: str
     name: Optional[str] = None
@@ -54,8 +53,7 @@ class TableauTag(BaseModel):
     Aux class for Tag object of the tableau_api_lib response
     """
 
-    class Config:
-        frozen = True
+    model_config = ConfigDict(frozen=True)
 
     label: str
 
@@ -153,8 +151,7 @@ class TableauDashboard(TableauBaseModel):
     Aux class for Dashboard object of the tableau_api_lib response
     """
 
-    class Config:
-        extra = Extra.allow
+    model_config = ConfigDict(extra="allow")
 
     project: Optional[TableauBaseModel] = None
     description: Optional[str] = None
|
@ -223,7 +223,7 @@ class DatabaseServiceSource(
|
|||||||
database_source_state: Set = set()
|
database_source_state: Set = set()
|
||||||
stored_procedure_source_state: Set = set()
|
stored_procedure_source_state: Set = set()
|
||||||
# Big union of types we want to fetch dynamically
|
# Big union of types we want to fetch dynamically
|
||||||
service_connection: DatabaseConnection.__fields__["config"].annotation
|
service_connection: DatabaseConnection.model_fields["config"].annotation
|
||||||
|
|
||||||
# When processing the database, the source will update the inspector if needed
|
# When processing the database, the source will update the inspector if needed
|
||||||
inspector: Inspector
|
inspector: Inspector
|
||||||
|
@@ -15,12 +15,11 @@ Domo Database Source Model module
 
 from typing import List, Optional
 
-from pydantic import BaseModel, Extra, Field
+from pydantic import BaseModel, ConfigDict, Field
 
 
 class DomoDatabaseBaseModel(BaseModel):
-    class Config:
-        extra = Extra.allow
+    model_config = ConfigDict(extra="allow")
 
     id: str
     name: str
@@ -17,7 +17,7 @@ from datetime import datetime
 from functools import lru_cache
 from typing import Dict, Iterable, List, Optional, Type
 
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
 from sqlalchemy.engine import Engine
 
 from metadata.generated.schema.entity.data.table import Table
@@ -46,12 +46,11 @@ class LifeCycleQueryByTable(BaseModel):
     Query executed get life cycle
     """
 
+    model_config = ConfigDict(populate_by_name=True)
+
     table_name: str = Field(..., alias="TABLE_NAME")
     created_at: Optional[datetime] = Field(None, alias="CREATED_AT")
 
-    class Config:
-        populate_by_name = True
-
 
 class LifeCycleQueryMixin:
     """
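`populate_by_name=True` (V1's `allow_population_by_field_name`) lets a model with SQL-style aliases accept either the alias or the Python field name. A minimal sketch of the behavior (the model is illustrative):

```python
from pydantic import BaseModel, ConfigDict, Field


class TableRow(BaseModel):  # illustrative model
    model_config = ConfigDict(populate_by_name=True)

    table_name: str = Field(..., alias="TABLE_NAME")


# The alias always works; populate_by_name additionally allows the field name.
assert TableRow(TABLE_NAME="orders").table_name == "orders"
assert TableRow(table_name="orders").table_name == "orders"
```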
@@ -591,7 +591,7 @@ class SasSource(
             )
             self.metadata.client.put(
                 path=f"{self.metadata.get_suffix(Table)}/{table_entity.id.root}/tableProfile",
-                data=table_profile_request.json(),
+                data=table_profile_request.model_dump_json(),
             )
 
         except Exception as exc:
@@ -16,7 +16,7 @@ Kinesis Models
 from enum import Enum
 from typing import List, Optional
 
-from pydantic import BaseModel, Extra
+from pydantic import BaseModel, ConfigDict
 
 
 class KinesisEnum(Enum):
@@ -66,8 +66,7 @@ class KinesisArgs(BaseModel):
     Model for Kinesis API Arguments
     """
 
-    class Config:
-        extra = Extra.allow
+    model_config = ConfigDict(extra="allow")
 
     Limit: int = 100
 
@@ -77,8 +76,7 @@ class KinesisStreamArgs(BaseModel):
     Model for Kinesis Stream API Arguments
     """
 
-    class Config:
-        extra = Extra.allow
+    model_config = ConfigDict(extra="allow")
 
     StreamName: str
 
@@ -116,7 +116,7 @@ class MessagingServiceSource(TopologyRunnerMixin, Source, ABC):
     source_config: MessagingServiceMetadataPipeline
     config: WorkflowSource
     # Big union of types we want to fetch dynamically
-    service_connection: MessagingConnection.__fields__["config"].annotation
+    service_connection: MessagingConnection.model_fields["config"].annotation
 
     topology = MessagingServiceTopology()
     context = TopologyContextManager(topology)
@@ -104,7 +104,7 @@ class MlModelServiceSource(TopologyRunnerMixin, Source, ABC):
     source_config: MlModelServiceMetadataPipeline
     config: WorkflowSource
     # Big union of types we want to fetch dynamically
-    service_connection: MlModelConnection.__fields__["config"].annotation
+    service_connection: MlModelConnection.model_fields["config"].annotation
 
     topology = MlModelServiceTopology()
     context = TopologyContextManager(topology)
@@ -73,7 +73,7 @@ from functools import singledispatch
 from typing import Any, DefaultDict, Dict, List, Optional, Type
 
 import attr
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
 
 from metadata.generated.schema.entity.data.table import Table
 from metadata.ingestion.ometa.models import T
@@ -129,12 +129,11 @@ class XLets(BaseModel):
     Group inlets and outlets from all tasks in a DAG
     """
 
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
     inlets: List[OMEntity]
     outlets: List[OMEntity]
 
-    class Config:
-        arbitrary_types_allowed = True
-
 
 def concat_dict_values(
     dict_1: DefaultDict[str, List[Any]], dict_2: Optional[Dict[str, List[Any]]]
@@ -16,7 +16,7 @@ Tableau Source Model module
 from datetime import datetime
 from typing import Any, List, Optional
 
-from pydantic import BaseModel, Extra, Field
+from pydantic import BaseModel, ConfigDict, Field
 
 
 class AirflowBaseModel(BaseModel):
@@ -24,9 +24,7 @@ class AirflowBaseModel(BaseModel):
     Tableau basic configurations
     """
 
-    class Config:
-        extra = Extra.allow
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
 
     dag_id: str
 
@@ -44,8 +42,7 @@ class AirflowTask(BaseModel):
     owner: Optional[str] = None
 
     # Allow picking up data from key `inlets` and `_inlets`
-    class Config:
-        populate_by_name = True
+    model_config = ConfigDict(populate_by_name=True)
 
 
 class TaskList(BaseModel):
@@ -120,7 +120,7 @@ class PipelineServiceSource(TopologyRunnerMixin, Source, ABC):
     source_config: PipelineServiceMetadataPipeline
     config: WorkflowSource
     # Big union of types we want to fetch dynamically
-    service_connection: PipelineConnection.__fields__["config"].annotation
+    service_connection: PipelineConnection.model_fields["config"].annotation
 
     topology = PipelineServiceTopology()
     context = TopologyContextManager(topology)
@@ -114,7 +114,7 @@ class SearchServiceSource(TopologyRunnerMixin, Source, ABC):
     source_config: SearchServiceMetadataPipeline
     config: WorkflowSource
     # Big union of types we want to fetch dynamically
-    service_connection: SearchConnection.__fields__["config"].annotation
+    service_connection: SearchConnection.model_fields["config"].annotation
 
     topology = SearchServiceTopology()
     context = TopologyContextManager(topology)
@@ -121,7 +121,7 @@ class StorageServiceSource(TopologyRunnerMixin, Source, ABC):
     config: WorkflowSource
     metadata: OpenMetadata
    # Big union of types we want to fetch dynamically
-    service_connection: StorageConnection.__fields__["config"].annotation
+    service_connection: StorageConnection.model_fields["config"].annotation
 
     topology = StorageServiceTopology()
     context = TopologyContextManager(topology)
@@ -17,6 +17,7 @@ multiple profilers per table and columns.
 """
 from typing import List, Optional, Type, Union
 
+from pydantic import ConfigDict
 from sqlalchemy import Column
 from sqlalchemy.orm import DeclarativeMeta
 
@@ -124,10 +125,9 @@ class ProfilerResponse(ConfigModel):
 class ThreadPoolMetrics(ConfigModel):
     """A container for all metrics to be computed on the same thread."""
 
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
     metrics: Union[List[Union[Type[Metric], CustomMetric]], Type[Metric]]
     metric_type: MetricTypes
     column: Optional[Union[Column, SQALikeColumn]] = None
     table: Union[Table, DeclarativeMeta]
 
-    class Config:
-        arbitrary_types_allowed = True
@@ -14,7 +14,7 @@ OpenMetadata source for the profiler
 import traceback
 from typing import Iterable, Optional, cast
 
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
 
 from metadata.generated.schema.entity.data.database import Database
 from metadata.generated.schema.entity.data.databaseSchema import DatabaseSchema
@@ -50,9 +50,7 @@ TAGS_FIELD = ["tags"]
 class ProfilerSourceAndEntity(BaseModel):
     """Return class for the OpenMetadata Profiler Source"""
 
-    class Config:
-        arbitrary_types_allowed = True
-        extra = "forbid"
+    model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=True)
 
     profiler_source: ProfilerSource
     entity: Table
@@ -26,7 +26,7 @@ def update_repository_name(
     Given an original set of credentials and a new repository name,
     return the updated credentials
     """
-    updated = original.copy(deep=True)
+    updated = original.model_copy(deep=True)
     updated.repositoryName = RepositoryName(name)
 
     return updated
@@ -39,6 +39,10 @@ def ignore_ssl_init(_: Optional[SslConfig]) -> bool:
 
 @ssl_verification_registry.add(VerifySSL.validate.value)
 def validate_ssl_init(ssl_config: Optional[SslConfig]) -> str:
+    if ssl_config is None:
+        raise InvalidSSLVerificationException(
+            "You have Verify SSL but the SSL Config is missing. Make sure to inform the CA Certificate path."
+        )
     return ssl_config.root.caCertificate.get_secret_value()
 
 
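This guard is the "better exception msg" half of the commit: with verify-ssl enabled but no SSL config supplied, `ssl_config.root` would previously fail on `None` with an opaque `AttributeError` instead of telling the user what to fix. A self-contained sketch of the pattern (the config object and exception class are stubs for illustration):

```python
from typing import Optional


class InvalidSSLVerificationException(Exception):
    """Raised when verify-ssl is enabled but no SSL config was provided."""


def validate_ssl_init(ssl_config: Optional[object]) -> str:  # stubbed signature
    if ssl_config is None:
        # Fail fast with an actionable message instead of the AttributeError
        # that `ssl_config.root` would raise on None.
        raise InvalidSSLVerificationException(
            "You have Verify SSL but the SSL Config is missing. "
            "Make sure to inform the CA Certificate path."
        )
    return ssl_config.root.caCertificate.get_secret_value()


try:
    validate_ssl_init(None)
except InvalidSSLVerificationException as exc:
    print(exc)  # actionable message instead of a NoneType AttributeError
```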
@@ -103,7 +103,7 @@ class CliCommonDB:
             self.get_profiler_time_partition_results()
         )
         if expected_profiler_time_partition_results:
-            table_profile = profile.profile.dict()
+            table_profile = profile.profile.model_dump()
             for key in expected_profiler_time_partition_results["table_profile"]:
                 self.assertEqual(
                     table_profile[key],
@@ -122,7 +122,7 @@ class CliCommonDB:
                 None,
             )
             if expected_column_profile:
-                column_profile = column.profile.dict()
+                column_profile = column.profile.model_dump()
                 for key in expected_column_profile:  # type: ignore
                     if key == "nonParametricSkew":
                         self.assertEqual(
|
@ -53,7 +53,7 @@ def ingest_mysql_service(
|
|||||||
"sink": {"type": "metadata-rest", "config": {}},
|
"sink": {"type": "metadata-rest", "config": {}},
|
||||||
"workflowConfig": {
|
"workflowConfig": {
|
||||||
"loggerLevel": LogLevels.DEBUG.value,
|
"loggerLevel": LogLevels.DEBUG.value,
|
||||||
"openMetadataServerConfig": metadata.config.dict(),
|
"openMetadataServerConfig": metadata.config.model_dump(),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
metadata_ingestion = MetadataWorkflow.create(workflow_config)
|
metadata_ingestion = MetadataWorkflow.create(workflow_config)
|
||||||
|
@ -130,7 +130,7 @@ class OMetaChartTest(TestCase):
|
|||||||
|
|
||||||
res_create = self.metadata.create_or_update(data=self.create)
|
res_create = self.metadata.create_or_update(data=self.create)
|
||||||
|
|
||||||
updated = self.create.dict(exclude_unset=True)
|
updated = self.create.model_dump(exclude_unset=True)
|
||||||
updated["owner"] = self.owner
|
updated["owner"] = self.owner
|
||||||
updated_entity = CreateChartRequest(**updated)
|
updated_entity = CreateChartRequest(**updated)
|
||||||
|
|
||||||
|
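The test updates above (and the ones that follow) rely on `.dict()` becoming `.model_dump()`; the `exclude_unset=True` flag keeps only the fields the caller explicitly set, which is what makes this rebuild-and-tweak pattern safe. A sketch with a placeholder request model:

```python
from typing import Optional

from pydantic import BaseModel


class CreateChartLikeRequest(BaseModel):  # placeholder for illustration
    name: str
    description: Optional[str] = None
    owner: Optional[str] = None


req = CreateChartLikeRequest(name="revenue")
# exclude_unset drops defaults the caller never provided...
updated = req.model_dump(exclude_unset=True)
assert updated == {"name": "revenue"}
# ...so the round-trip only overrides what the test actually changes.
updated["owner"] = "jane"
req2 = CreateChartLikeRequest(**updated)
assert req2.owner == "jane"
```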
@@ -130,7 +130,7 @@ class OMetaDashboardTest(TestCase):
 
         res_create = self.metadata.create_or_update(data=self.create)
 
-        updated = self.create.dict(exclude_unset=True)
+        updated = self.create.model_dump(exclude_unset=True)
         updated["owner"] = self.owner
         updated_entity = CreateDashboardRequest(**updated)
 
@@ -135,7 +135,7 @@ class OMetaDatabaseTest(TestCase):
 
         res_create = self.metadata.create_or_update(data=self.create)
 
-        updated = self.create.dict(exclude_unset=True)
+        updated = self.create.model_dump(exclude_unset=True)
         updated["owner"] = self.owner
         updated_entity = CreateDatabaseRequest(**updated)
 
@@ -162,7 +162,7 @@ class OMetaModelTest(TestCase):
 
         res_create = self.metadata.create_or_update(data=self.create)
 
-        updated = self.create.dict(exclude_unset=True)
+        updated = self.create.model_dump(exclude_unset=True)
         updated["owner"] = self.owner
         updated_entity = CreateMlModelRequest(**updated)
 
@@ -142,7 +142,7 @@ class OMetaPipelineTest(TestCase):
 
         res_create = self.metadata.create_or_update(data=self.create)
 
-        updated = self.create.dict(exclude_unset=True)
+        updated = self.create.model_dump(exclude_unset=True)
         updated["owner"] = self.owner
         updated_entity = CreatePipelineRequest(**updated)
 
@@ -243,7 +243,7 @@ class OMetaRolePolicyTest(TestCase):
 
         res_create = self.metadata.create_or_update(data=self.create_policy)
 
-        updated = self.create_policy.dict(exclude_unset=True)
+        updated = self.create_policy.model_dump(exclude_unset=True)
         updated["rules"] = [self.rule_3]
         updated_policy_entity = CreatePolicyRequest(**updated)
 
@@ -463,7 +463,7 @@ class OMetaRolePolicyTest(TestCase):
 
         res_create = self.metadata.create_or_update(data=self.create_role)
 
-        updated = self.create_role.dict(exclude_unset=True)
+        updated = self.create_role.model_dump(exclude_unset=True)
         updated["policies"] = [self.role_policy_2.name]
         updated_entity = CreateRoleRequest(**updated)
 
@@ -129,7 +129,7 @@ class OMetaObjectStoreTest(TestCase):
 
         res_create = self.metadata.create_or_update(data=self.create)
 
-        updated = self.create.dict(exclude_unset=True)
+        updated = self.create.model_dump(exclude_unset=True)
         updated["owner"] = self.owner
         updated_entity = CreateContainerRequest(**updated)
 
@@ -231,7 +231,7 @@ class OMetaTableTest(TestCase):
 
         res_create = self.metadata.create_or_update(data=self.create)
 
-        updated = self.create.dict(exclude_unset=True)
+        updated = self.create.model_dump(exclude_unset=True)
         updated["owner"] = self.owner
         updated_entity = CreateTableRequest(**updated)
 
@@ -130,7 +130,7 @@ class OMetaTopicTest(TestCase):
 
         res_create = self.metadata.create_or_update(data=self.create)
 
-        updated = self.create.dict(exclude_unset=True)
+        updated = self.create.model_dump(exclude_unset=True)
         updated["owner"] = self.owner
         updated_entity = CreateTopicRequest(**updated)
 
@@ -133,7 +133,7 @@ def test_incompatible_column_type(ingest_metadata, metadata: OpenMetadata, db_se
                     "entityFullyQualifiedName": f"{db_service.fullyQualifiedName.root}.dvdrental.public.customer",
                 }
             },
-            "serviceConnection": db_service.connection.dict(),
+            "serviceConnection": db_service.connection.model_dump(),
         },
         "processor": {
             "type": "orm-test-runner",
|
|||||||
},
|
},
|
||||||
"workflowConfig": {
|
"workflowConfig": {
|
||||||
"loggerLevel": "DEBUG",
|
"loggerLevel": "DEBUG",
|
||||||
"openMetadataServerConfig": metadata.config.dict(),
|
"openMetadataServerConfig": metadata.config.model_dump(),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
test_suite_procesor = TestSuiteWorkflow.create(workflow_config)
|
test_suite_procesor = TestSuiteWorkflow.create(workflow_config)
|
||||||
|
@@ -145,7 +145,7 @@ def test_ingest_query_log(db_service, ingest_metadata, metadata: OpenMetadata):
         "sink": {"type": "metadata-rest", "config": {}},
         "workflowConfig": {
             "loggerLevel": "DEBUG",
-            "openMetadataServerConfig": metadata.config.dict(),
+            "openMetadataServerConfig": metadata.config.model_dump(),
         },
     }
     metadata_ingestion = MetadataWorkflow.create(workflow_config)
@@ -199,7 +199,7 @@ def run_profiler_workflow(ingest_metadata, db_service, metadata):
             loggerLevel=LogLevels.DEBUG, openMetadataServerConfig=metadata.config
         ),
     )
-    metadata_ingestion = ProfilerWorkflow.create(workflow_config.dict())
+    metadata_ingestion = ProfilerWorkflow.create(workflow_config.model_dump())
     search_cache.clear()
     metadata_ingestion.execute()
     return
@@ -212,7 +212,7 @@ def ingest_query_usage(ingest_metadata, db_service, metadata):
         "source": {
             "type": "postgres-usage",
             "serviceName": db_service.fullyQualifiedName.root,
-            "serviceConnection": db_service.connection.dict(),
+            "serviceConnection": db_service.connection.model_dump(),
             "sourceConfig": {
                 "config": {"type": DatabaseUsageConfigType.DatabaseUsage.value}
             },
@@ -233,7 +233,7 @@ def ingest_query_usage(ingest_metadata, db_service, metadata):
         "sink": {"type": "metadata-rest", "config": {}},
         "workflowConfig": {
             "loggerLevel": "DEBUG",
-            "openMetadataServerConfig": metadata.config.dict(),
+            "openMetadataServerConfig": metadata.config.model_dump(),
         },
     }
     workflow = UsageWorkflow.create(workflow_config)
@@ -278,7 +278,7 @@ def run_usage_workflow(db_service, metadata):
         "source": {
             "type": "postgres-usage",
             "serviceName": db_service.fullyQualifiedName.root,
-            "serviceConnection": db_service.connection.dict(),
+            "serviceConnection": db_service.connection.model_dump(),
             "sourceConfig": {
                 "config": {"type": DatabaseUsageConfigType.DatabaseUsage.value}
             },
@@ -299,7 +299,7 @@ def run_usage_workflow(db_service, metadata):
         "sink": {"type": "metadata-rest", "config": {}},
         "workflowConfig": {
             "loggerLevel": "DEBUG",
-            "openMetadataServerConfig": metadata.config.dict(),
+            "openMetadataServerConfig": metadata.config.model_dump(),
         },
     }
     workflow = UsageWorkflow.create(workflow_config)
@@ -317,7 +317,7 @@ def test_usage_delete_usage(db_service, ingest_postgres_lineage, metadata):
         "source": {
             "type": "postgres-usage",
             "serviceName": db_service.fullyQualifiedName.root,
-            "serviceConnection": db_service.connection.dict(),
+            "serviceConnection": db_service.connection.model_dump(),
             "sourceConfig": {
                 "config": {"type": DatabaseUsageConfigType.DatabaseUsage.value}
             },
@@ -338,7 +338,7 @@ def test_usage_delete_usage(db_service, ingest_postgres_lineage, metadata):
         "sink": {"type": "metadata-rest", "config": {}},
         "workflowConfig": {
             "loggerLevel": "DEBUG",
-            "openMetadataServerConfig": metadata.config.dict(),
+            "openMetadataServerConfig": metadata.config.model_dump(),
         },
     }
     workflow = UsageWorkflow.create(workflow_config)
@@ -73,7 +73,7 @@ def test_sample_data(db_service, db_fqn, metadata):
         },
         "workflowConfig": {
             "loggerLevel": LogLevels.DEBUG,
-            "openMetadataServerConfig": metadata.config.dict(),
+            "openMetadataServerConfig": metadata.config.model_dump(),
         },
     }
     profiler_workflow = ProfilerWorkflow.create(workflow_config)
@@ -136,7 +136,7 @@ def ingest_metadata(mssql_container, metadata: OpenMetadata, request):
         "sink": {"type": "metadata-rest", "config": {}},
         "workflowConfig": {
             "loggerLevel": "DEBUG",
-            "openMetadataServerConfig": metadata.config.dict(),
+            "openMetadataServerConfig": metadata.config.model_dump(),
         },
     }
     metadata_ingestion = MetadataWorkflow.create(workflow_config)
@@ -183,7 +183,7 @@ def run_lineage_workflow(
         "sink": {"type": "metadata-rest", "config": {}},
         "workflowConfig": {
             "loggerLevel": "INFO",
-            "openMetadataServerConfig": metadata.config.dict(),
+            "openMetadataServerConfig": metadata.config.model_dump(),
         },
     }
     metadata_ingestion = MetadataWorkflow.create(workflow_config)
|
@ -194,7 +194,7 @@ class EntityReportProcessorTest(unittest.TestCase):
|
|||||||
flat_result.timestamp = Timestamp(1695324826495)
|
flat_result.timestamp = Timestamp(1695324826495)
|
||||||
processed.append(flat_result)
|
processed.append(flat_result)
|
||||||
assert all(
|
assert all(
|
||||||
k in flat_result.data.dict()
|
k in flat_result.data.model_dump()
|
||||||
for k in [
|
for k in [
|
||||||
"entityType",
|
"entityType",
|
||||||
"entityTier",
|
"entityTier",
|
||||||
|
@@ -448,12 +448,12 @@ public class OpenMetadataApplication extends Application<OpenMetadataApplication
     String maxMigration = Migration.lastMigrationFile(conf.getMigrationConfiguration());
     if (lastMigrated.isEmpty()) {
       throw new IllegalStateException(
-          "Could not validate Flyway migrations in the database. Make sure you have run `./bootstrap/bootstrap_storage.sh migrate-all` at least once.");
+          "Could not validate Flyway migrations in the database. Make sure you have run `./bootstrap/openmetadata-ops.sh migrate` at least once.");
     }
     if (lastMigrated.get().compareTo(maxMigration) < 0) {
       throw new IllegalStateException(
           "There are pending migrations to be run on the database."
-              + " Please backup your data and run `./bootstrap/bootstrap_storage.sh migrate-all`."
+              + " Please backup your data and run `./bootstrap/openmetadata-ops.sh migrate`."
               + " You can find more information on upgrading OpenMetadata at"
               + " https://docs.open-metadata.org/deployment/upgrade ");
     }
@@ -30,7 +30,7 @@ public final class Migration {
     } catch (StatementException e) {
       throw new IllegalArgumentException(
           "Exception encountered when trying to obtain last migrated Flyway version."
-              + " Make sure you have run `./bootstrap/bootstrap_storage.sh migrate-all` at least once.",
+              + " Make sure you have run `./bootstrap/openmetadata-ops.sh migrate` at least once.",
           e);
     }
   }
@@ -64,7 +64,7 @@ public class MigrationWorkflow {
     if (!migrations.isEmpty()) {
       throw new IllegalStateException(
           "There are pending migrations to be run on the database."
-              + " Please backup your data and run `./bootstrap/bootstrap_storage.sh migrate-all`."
+              + " Please backup your data and run `./bootstrap/openmetadata-ops.sh migrate`."
              + " You can find more information on upgrading OpenMetadata at"
              + " https://docs.open-metadata.org/deployment/upgrade ");
    }