MINOR - Pydantic V2 warnings and better exception msg (#16916)

Pere Miquel Brull 2024-07-04 14:54:41 +02:00 committed by GitHub
parent 5a7b0b48c2
commit 7e98ece3e5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
52 changed files with 96 additions and 114 deletions

View File

@@ -11,8 +11,6 @@
# limitations under the License.
echo "Initializing OpenMetadata Server...";
-# echo "Migrating the database to the latest version and the indexes in ElasticSearch...";
-# ./bootstrap/bootstrap_storage.sh migrate-all
echo " ||||||| "
echo " |||| |||| ____ "
echo " |||| |||| / __ \ "

View File

@@ -200,7 +200,7 @@ class AirflowLineageRunner:
return self.metadata.patch(
entity=Pipeline,
source=pipeline,
-destination=pipeline.copy(update=pipeline_request.__dict__),
+destination=pipeline.model_copy(update=pipeline_request.__dict__),
allowed_fields=ALLOWED_COMMON_PATCH_FIELDS,
restrict_update_fields=RESTRICT_UPDATE_LIST,
)
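
Side note for anyone applying the same migration: Pydantic V2 renames BaseModel.copy() to model_copy(); the V1 name still works but emits a deprecation warning. A minimal sketch of the update pattern, using a hypothetical Pipeline model rather than the generated OpenMetadata class:

from typing import Optional
from pydantic import BaseModel

class Pipeline(BaseModel):  # hypothetical stand-in for the generated entity
    name: str
    description: Optional[str] = None

source = Pipeline(name="orders_dag")
# model_copy(update=...) returns a new instance with the given fields overridden
destination = source.model_copy(update={"description": "Nightly orders load"})
assert source.description is None
assert destination.description == "Nightly orders load"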

View File

@@ -17,7 +17,7 @@ import traceback
from dataclasses import dataclass
from typing import List, Optional, Union
-from pydantic import BaseModel, Extra
+from pydantic import BaseModel, ConfigDict
from pydomo import Domo
from metadata.generated.schema.entity.services.connections.dashboard.domoDashboardConnection import (
@@ -43,8 +43,7 @@ class DomoBaseModel(BaseModel):
Domo basic configurations
"""
-class Config:
-extra = Extra.allow
+model_config = ConfigDict(extra="allow")
id: str
name: str
@@ -75,8 +74,7 @@ class DomoChartMetadataDetails(BaseModel):
Metadata Details in chart
"""
-class Config:
-extra = Extra.allow
+model_config = ConfigDict(extra="allow")
chartType: Optional[str] = None
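
The recurring change in these model files is V1's nested class Config giving way to a model_config = ConfigDict(...) attribute. A minimal sketch, assuming a model that must keep unknown API fields:

from pydantic import BaseModel, ConfigDict

class DomoLikeModel(BaseModel):  # hypothetical example model
    model_config = ConfigDict(extra="allow")  # V1: class Config: extra = Extra.allow
    id: str

m = DomoLikeModel(id="1", chartKind="bar")  # unknown keys are kept, not rejected
assert m.chartKind == "bar"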

View File

@@ -19,14 +19,13 @@ from abc import ABC, abstractmethod
from typing import IO, Any, Optional
import yaml
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
class ConfigModel(BaseModel):
"""Class definition for config model"""
-class Config:
-extra = "forbid"
+model_config = ConfigDict(extra="forbid")
class DynamicTypedConfig(ConfigModel):
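
extra="forbid" goes the other way: unknown keys raise a ValidationError instead of being silently dropped, which suits user-supplied configuration. A short sketch of the behavior (model name hypothetical):

from pydantic import BaseModel, ConfigDict, ValidationError

class StrictConfig(BaseModel):  # hypothetical example model
    model_config = ConfigDict(extra="forbid")
    type: str

try:
    StrictConfig(type="rest", tpye="oops")  # typo in a config key
except ValidationError as err:
    print(err.errors()[0]["type"])  # extra_forbidden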

View File

@@ -17,7 +17,7 @@ from datetime import datetime, timezone
from types import MappingProxyType
from typing import Dict, Iterable, Optional, Union, cast
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
from metadata.data_insight.processor.reports.cost_analysis_report_data_processor import (
AggregatedCostAnalysisReportDataProcessor,
@@ -48,9 +48,7 @@ logger = profiler_logger()
class DataInsightRecord(BaseModel):
"""Return class for the OpenMetadata Profiler Source"""
-class Config:
-arbitrary_types_allowed = True
-extra = "forbid"
+model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=True)
data: ReportData
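
Several V1 Config attributes collapse into a single ConfigDict call, here combining extra="forbid" with arbitrary_types_allowed for fields typed with non-Pydantic classes. A minimal sketch with hypothetical types:

from pydantic import BaseModel, ConfigDict

class ReportData:  # an arbitrary, non-Pydantic type
    pass

class DataInsightLikeRecord(BaseModel):  # hypothetical example model
    model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=True)
    data: ReportData  # accepted only because arbitrary_types_allowed=True

record = DataInsightLikeRecord(data=ReportData())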

View File

@@ -175,7 +175,7 @@ class SQATestSuiteInterface(SQAInterfaceMixin, TestSuiteInterface):
runtime_params = setter.get_parameters(test_case)
test_case.parameterValues.append(
TestCaseParameterValue(
-name="runtimeParams", value=runtime_params.json()
+name="runtimeParams", value=runtime_params.model_dump_json()
)
)
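
Similarly, V1's .json() serializer becomes model_dump_json() in V2 with identical output. A sketch with a hypothetical parameters model:

from pydantic import BaseModel

class RuntimeParams(BaseModel):  # hypothetical stand-in
    table: str
    threshold: float = 0.1

payload = RuntimeParams(table="dim_customer").model_dump_json()
assert payload == '{"table":"dim_customer","threshold":0.1}'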

View File

@@ -243,7 +243,7 @@ class TableDiffValidator(BaseTestValidator, SQAValidatorMixin):
raw = self.get_test_case_param_value(
self.test_case.parameterValues, "runtimeParams", str
)
-runtime_params = TableDiffRuntimeParameters.parse_raw(raw)
+runtime_params = TableDiffRuntimeParameters.model_validate_json(raw)
return runtime_params
def get_row_diff_test_case_result(
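
parse_raw() is the deserializing counterpart; V2 renames it model_validate_json(). Round-tripping the same hypothetical model:

from pydantic import BaseModel

class RuntimeParams(BaseModel):  # hypothetical stand-in
    table: str
    threshold: float = 0.1

raw = '{"table": "dim_customer", "threshold": 0.25}'
params = RuntimeParams.model_validate_json(raw)  # V1: RuntimeParams.parse_raw(raw)
assert params.threshold == 0.25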

View File

@@ -13,7 +13,7 @@ Common definitions for configuration management
"""
from typing import Any, Optional, TypeVar
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
from metadata.utils.logger import ingestion_logger
@@ -27,8 +27,7 @@ Entity = TypeVar("Entity", bound=BaseModel)
class ConfigModel(BaseModel):
-class Config:
-extra = "forbid"
+model_config = ConfigDict(extra="forbid")
class DynamicTypedConfig(ConfigModel):

View File

@@ -294,7 +294,7 @@ def _unsafe_parse_config(config: dict, cls: Type[T], message: str) -> None:
cls.model_validate(config)
except ValidationError as err:
logger.debug(
-f"The supported properties for {cls.__name__} are {list(cls.__fields__.keys())}"
+f"The supported properties for {cls.__name__} are {list(cls.model_fields.keys())}"
)
raise err
@@ -315,7 +315,7 @@ def _unsafe_parse_dbt_config(config: dict, cls: Type[T], message: str) -> None:
cls.model_validate(config)
except ValidationError as err:
logger.debug(
-f"The supported properties for {cls.__name__} are {list(cls.__fields__.keys())}"
+f"The supported properties for {cls.__name__} are {list(cls.model_fields.keys())}"
)
raise err
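
In V2, cls.__fields__ still works but is deprecated in favor of the model_fields class attribute, a dict of FieldInfo objects keyed by field name. A sketch of the debug message above, with a hypothetical model:

from pydantic import BaseModel, ValidationError

class Options(BaseModel):  # hypothetical example model
    host: str
    port: int = 5432

try:
    Options.model_validate({"hostname": "db"})
except ValidationError:
    # model_fields replaces the deprecated __fields__ mapping
    print(f"The supported properties for {Options.__name__} are {list(Options.model_fields.keys())}")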

View File

@@ -328,7 +328,7 @@ class TopologyRunnerMixin(Generic[C]):
"""
return PatchRequest(
original_entity=original_entity,
-new_entity=original_entity.copy(update=create_request.__dict__),
+new_entity=original_entity.model_copy(update=create_request.__dict__),
override_metadata=self.source_config.overrideMetadata,
)

View File

@@ -440,7 +440,7 @@ def _sort_array_entity_fields(
destination_attr = destination_dict.get(model_str(source_attr.name))
if destination_attr:
updated_attributes.append(
-source_attr.copy(update=destination_attr.__dict__)
+source_attr.model_copy(update=destination_attr.__dict__)
)
# Remove the updated attribute from the destination dictionary
del destination_dict[model_str(source_attr.name)]

View File

@@ -296,7 +296,7 @@ class TopologyContextManager:
# If it does not exist yet, copies the Parent Context in order to have all context gathered until this point.
self.contexts.setdefault(
-thread_id, self.contexts[parent_thread_id].copy(deep=True)
+thread_id, self.contexts[parent_thread_id].model_copy(deep=True)
)
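
copy(deep=True) maps to model_copy(deep=True), which also clones nested containers so the copy can be mutated without touching the original. A minimal sketch with a hypothetical context model:

from typing import List
from pydantic import BaseModel

class Context(BaseModel):  # hypothetical stand-in for the topology context
    stack: List[str] = []

parent = Context(stack=["database"])
child = parent.model_copy(deep=True)  # deep=True clones the list as well
child.stack.append("schema")
assert parent.stack == ["database"]  # the parent context is untouched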

View File

@@ -199,7 +199,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
return None
# https://docs.pydantic.dev/latest/usage/exporting_models/#modelcopy
-destination = source.copy(deep=True)
+destination = source.model_copy(deep=True)
destination.description = Markdown(description)
return self.patch(entity=entity, source=source, destination=destination)
@@ -228,7 +228,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
table.tableConstraints = instance.tableConstraints
-destination = table.copy(deep=True)
+destination = table.model_copy(deep=True)
destination.tableConstraints = constraints
return self.patch(entity=Table, source=table, destination=destination)
@@ -253,7 +253,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
if not source:
return None
-destination = source.copy(deep=True)
+destination = source.model_copy(deep=True)
destination.entityLink = EntityLink(entity_link)
if test_case_parameter_values:
@@ -291,7 +291,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
# Initialize empty tag list or the last updated tags
source.tags = instance.tags or []
-destination = source.copy(deep=True)
+destination = source.model_copy(deep=True)
tag_fqns = {label.tagFQN.root for label in tag_labels}
@@ -385,7 +385,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
# Make sure we run the patch against the last updated data from the API
table.columns = instance.columns
-destination = table.copy(deep=True)
+destination = table.model_copy(deep=True)
for column_tag in column_tags or []:
update_column_tags(destination.columns, column_tag, operation)
@@ -472,7 +472,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
# Make sure we run the patch against the last updated data from the API
table.columns = instance.columns
-destination = table.copy(deep=True)
+destination = table.model_copy(deep=True)
update_column_description(destination.columns, column_descriptions, force)
patched_entity = self.patch(entity=Table, source=table, destination=destination)
@@ -531,7 +531,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
:param life_cycle_data: Life Cycle data to add
"""
try:
-destination = entity.copy(deep=True)
+destination = entity.model_copy(deep=True)
destination.lifeCycle = life_cycle
return self.patch(
entity=type(entity), source=entity, destination=destination
@@ -546,7 +546,7 @@ class OMetaPatchMixin(OMetaPatchMixinBase):
def patch_domain(self, entity: Entity, domain: Domain) -> Optional[Entity]:
"""Patch domain data for an Entity"""
try:
-destination: Entity = entity.copy(deep=True)
+destination: Entity = entity.model_copy(deep=True)
destination.domain = EntityReference(id=domain.id, type="domain")
return self.patch(
entity=type(entity), source=entity, destination=destination

View File

@@ -60,7 +60,7 @@ class FileSink(Sink):
if self.wrote_something:
self.file.write(",\n")
-self.file.write(record.json())
+self.file.write(record.model_dump_json())
self.wrote_something = True
return Either(right=get_log_name(record))

View File

@@ -205,7 +205,7 @@ class DashboardServiceSource(TopologyRunnerMixin, Source, ABC):
config: WorkflowSource
metadata: OpenMetadata
# Big union of types we want to fetch dynamically
-service_connection: DashboardConnection.__fields__["config"].annotation
+service_connection: DashboardConnection.model_fields["config"].annotation
topology = DashboardServiceTopology()
context = TopologyContextManager(topology)
@@ -608,7 +608,7 @@ class DashboardServiceSource(TopologyRunnerMixin, Source, ABC):
"""
patch_request = PatchRequest(
original_entity=original_entity,
-new_entity=original_entity.copy(update=create_request.__dict__),
+new_entity=original_entity.model_copy(update=create_request.__dict__),
)
if isinstance(original_entity, Dashboard):
# For patch the charts need to be entity ref instead of fqn
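
The service_connection annotations apply the same rename at the class level: model_fields entries are FieldInfo objects whose .annotation holds the declared type, here the union of connection configs. A sketch under simplified, hypothetical connection types:

from typing import Optional, Union
from pydantic import BaseModel

class LookerConnection(BaseModel):  # hypothetical connection configs
    hostPort: str

class SupersetConnection(BaseModel):
    hostPort: str

class DashboardConnection(BaseModel):
    config: Optional[Union[LookerConnection, SupersetConnection]] = None

# __fields__["config"] is deprecated V2 access; model_fields is the replacement
config_union = DashboardConnection.model_fields["config"].annotation
print(config_union)  # Optional[Union[LookerConnection, SupersetConnection]]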

View File

@@ -18,7 +18,7 @@ from time import sleep
from typing import List, Optional, Tuple
import msal
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
from metadata.generated.schema.entity.services.connections.dashboard.powerBIConnection import (
PowerBIConnection,
@@ -327,8 +327,7 @@ class PowerBiApiClient:
class PowerBiClient(BaseModel):
-class Config:
-arbitrary_types_allowed = True
+model_config = ConfigDict(arbitrary_types_allowed=True)
api_client: PowerBiApiClient
file_client: Optional[PowerBiFileClient]

View File

@@ -15,7 +15,7 @@ Tableau Source Model module
from typing import Any, Dict, List, Optional, Union
-from pydantic import BaseModel, Extra, Field, validator
+from pydantic import BaseModel, ConfigDict, Field, validator
from metadata.generated.schema.entity.data.chart import ChartType
@@ -25,8 +25,7 @@ class TableauBaseModel(BaseModel):
Tableau basic configurations
"""
-class Config:
-extra = Extra.allow
+model_config = ConfigDict(extra="allow")
id: str
name: Optional[str] = None
@@ -54,8 +53,7 @@ class TableauTag(BaseModel):
Aux class for Tag object of the tableau_api_lib response
"""
-class Config:
-frozen = True
+model_config = ConfigDict(frozen=True)
label: str
@@ -153,8 +151,7 @@ class TableauDashboard(TableauBaseModel):
Aux class for Dashboard object of the tableau_api_lib response
"""
-class Config:
-extra = Extra.allow
+model_config = ConfigDict(extra="allow")
project: Optional[TableauBaseModel] = None
description: Optional[str] = None
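
One more Config option shows up here: frozen=True, which in V2 makes instances immutable and hashable. A minimal sketch with a hypothetical tag model:

from pydantic import BaseModel, ConfigDict, ValidationError

class Tag(BaseModel):  # hypothetical example model
    model_config = ConfigDict(frozen=True)
    label: str

tag = Tag(label="PII")
try:
    tag.label = "Tier1"  # mutation is rejected on a frozen model
except ValidationError as err:
    print(err.errors()[0]["type"])  # frozen_instance
assert tag in {tag}  # frozen models are hashable, so usable in sets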

View File

@@ -223,7 +223,7 @@ class DatabaseServiceSource(
database_source_state: Set = set()
stored_procedure_source_state: Set = set()
# Big union of types we want to fetch dynamically
-service_connection: DatabaseConnection.__fields__["config"].annotation
+service_connection: DatabaseConnection.model_fields["config"].annotation
# When processing the database, the source will update the inspector if needed
inspector: Inspector

View File

@@ -15,12 +15,11 @@ Domo Database Source Model module
from typing import List, Optional
-from pydantic import BaseModel, Extra, Field
+from pydantic import BaseModel, ConfigDict, Field
class DomoDatabaseBaseModel(BaseModel):
-class Config:
-extra = Extra.allow
+model_config = ConfigDict(extra="allow")
id: str
name: str

View File

@@ -17,7 +17,7 @@ from datetime import datetime
from functools import lru_cache
from typing import Dict, Iterable, List, Optional, Type
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, ConfigDict, Field
from sqlalchemy.engine import Engine
from metadata.generated.schema.entity.data.table import Table
@@ -46,12 +46,11 @@ class LifeCycleQueryByTable(BaseModel):
Query executed get life cycle
"""
+model_config = ConfigDict(populate_by_name=True)
table_name: str = Field(..., alias="TABLE_NAME")
created_at: Optional[datetime] = Field(None, alias="CREATED_AT")
-class Config:
-populate_by_name = True
class LifeCycleQueryMixin:
"""

View File

@@ -591,7 +591,7 @@ class SasSource(
)
self.metadata.client.put(
path=f"{self.metadata.get_suffix(Table)}/{table_entity.id.root}/tableProfile",
-data=table_profile_request.json(),
+data=table_profile_request.model_dump_json(),
)
except Exception as exc:

View File

@@ -16,7 +16,7 @@ Kinesis Models
from enum import Enum
from typing import List, Optional
-from pydantic import BaseModel, Extra
+from pydantic import BaseModel, ConfigDict
class KinesisEnum(Enum):
@@ -66,8 +66,7 @@ class KinesisArgs(BaseModel):
Model for Kinesis API Arguments
"""
-class Config:
-extra = Extra.allow
+model_config = ConfigDict(extra="allow")
Limit: int = 100
@@ -77,8 +76,7 @@ class KinesisStreamArgs(BaseModel):
Model for Kinesis Stream API Arguments
"""
-class Config:
-extra = Extra.allow
+model_config = ConfigDict(extra="allow")
StreamName: str

View File

@@ -116,7 +116,7 @@ class MessagingServiceSource(TopologyRunnerMixin, Source, ABC):
source_config: MessagingServiceMetadataPipeline
config: WorkflowSource
# Big union of types we want to fetch dynamically
-service_connection: MessagingConnection.__fields__["config"].annotation
+service_connection: MessagingConnection.model_fields["config"].annotation
topology = MessagingServiceTopology()
context = TopologyContextManager(topology)

View File

@@ -104,7 +104,7 @@ class MlModelServiceSource(TopologyRunnerMixin, Source, ABC):
source_config: MlModelServiceMetadataPipeline
config: WorkflowSource
# Big union of types we want to fetch dynamically
-service_connection: MlModelConnection.__fields__["config"].annotation
+service_connection: MlModelConnection.model_fields["config"].annotation
topology = MlModelServiceTopology()
context = TopologyContextManager(topology)

View File

@@ -73,7 +73,7 @@ from functools import singledispatch
from typing import Any, DefaultDict, Dict, List, Optional, Type
import attr
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
from metadata.generated.schema.entity.data.table import Table
from metadata.ingestion.ometa.models import T
@@ -129,12 +129,11 @@ class XLets(BaseModel):
Group inlets and outlets from all tasks in a DAG
"""
+model_config = ConfigDict(arbitrary_types_allowed=True)
inlets: List[OMEntity]
outlets: List[OMEntity]
-class Config:
-arbitrary_types_allowed = True
def concat_dict_values(
dict_1: DefaultDict[str, List[Any]], dict_2: Optional[Dict[str, List[Any]]]

View File

@@ -16,7 +16,7 @@ Tableau Source Model module
from datetime import datetime
from typing import Any, List, Optional
-from pydantic import BaseModel, Extra, Field
+from pydantic import BaseModel, ConfigDict, Field
class AirflowBaseModel(BaseModel):
@@ -24,9 +24,7 @@ class AirflowBaseModel(BaseModel):
Tableau basic configurations
"""
-class Config:
-extra = Extra.allow
-arbitrary_types_allowed = True
+model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
dag_id: str
@@ -44,8 +42,7 @@ class AirflowTask(BaseModel):
owner: Optional[str] = None
# Allow picking up data from key `inlets` and `_inlets`
-class Config:
-populate_by_name = True
+model_config = ConfigDict(populate_by_name=True)
class TaskList(BaseModel):

View File

@@ -120,7 +120,7 @@ class PipelineServiceSource(TopologyRunnerMixin, Source, ABC):
source_config: PipelineServiceMetadataPipeline
config: WorkflowSource
# Big union of types we want to fetch dynamically
-service_connection: PipelineConnection.__fields__["config"].annotation
+service_connection: PipelineConnection.model_fields["config"].annotation
topology = PipelineServiceTopology()
context = TopologyContextManager(topology)

View File

@@ -114,7 +114,7 @@ class SearchServiceSource(TopologyRunnerMixin, Source, ABC):
source_config: SearchServiceMetadataPipeline
config: WorkflowSource
# Big union of types we want to fetch dynamically
-service_connection: SearchConnection.__fields__["config"].annotation
+service_connection: SearchConnection.model_fields["config"].annotation
topology = SearchServiceTopology()
context = TopologyContextManager(topology)

View File

@@ -121,7 +121,7 @@ class StorageServiceSource(TopologyRunnerMixin, Source, ABC):
config: WorkflowSource
metadata: OpenMetadata
# Big union of types we want to fetch dynamically
-service_connection: StorageConnection.__fields__["config"].annotation
+service_connection: StorageConnection.model_fields["config"].annotation
topology = StorageServiceTopology()
context = TopologyContextManager(topology)

View File

@@ -17,6 +17,7 @@ multiple profilers per table and columns.
"""
from typing import List, Optional, Type, Union
+from pydantic import ConfigDict
from sqlalchemy import Column
from sqlalchemy.orm import DeclarativeMeta
@@ -124,10 +125,9 @@ class ProfilerResponse(ConfigModel):
class ThreadPoolMetrics(ConfigModel):
"""A container for all metrics to be computed on the same thread."""
+model_config = ConfigDict(arbitrary_types_allowed=True)
metrics: Union[List[Union[Type[Metric], CustomMetric]], Type[Metric]]
metric_type: MetricTypes
column: Optional[Union[Column, SQALikeColumn]] = None
table: Union[Table, DeclarativeMeta]
-class Config:
-arbitrary_types_allowed = True

View File

@@ -14,7 +14,7 @@ OpenMetadata source for the profiler
import traceback
from typing import Iterable, Optional, cast
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
from metadata.generated.schema.entity.data.database import Database
from metadata.generated.schema.entity.data.databaseSchema import DatabaseSchema
@@ -50,9 +50,7 @@ TAGS_FIELD = ["tags"]
class ProfilerSourceAndEntity(BaseModel):
"""Return class for the OpenMetadata Profiler Source"""
-class Config:
-arbitrary_types_allowed = True
-extra = "forbid"
+model_config = ConfigDict(extra="forbid", arbitrary_types_allowed=True)
profiler_source: ProfilerSource
entity: Table

View File

@@ -26,7 +26,7 @@ def update_repository_name(
Given an original set of credentials and a new repository name,
return the updated credentials
"""
-updated = original.copy(deep=True)
+updated = original.model_copy(deep=True)
updated.repositoryName = RepositoryName(name)
return updated

View File

@@ -39,6 +39,10 @@ def ignore_ssl_init(_: Optional[SslConfig]) -> bool:
@ssl_verification_registry.add(VerifySSL.validate.value)
def validate_ssl_init(ssl_config: Optional[SslConfig]) -> str:
+if ssl_config is None:
+raise InvalidSSLVerificationException(
+"You have Verify SSL but the SSL Config is missing. Make sure to inform the CA Certificate path."
+)
return ssl_config.root.caCertificate.get_secret_value()
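
This hunk is the "better exception msg" half of the commit: fail fast with an actionable message instead of an opaque AttributeError when verify-ssl is on but no SSL config was given. A simplified sketch of the guard pattern, with the SslConfig type abbreviated to a plain string:

from typing import Optional

class InvalidSSLVerificationException(Exception):
    """Raised when verify-ssl is enabled without an SSL config."""

def validate_ssl_init(ssl_config: Optional[str]) -> str:
    # guard clause: surface a clear, actionable error at setup time
    if ssl_config is None:
        raise InvalidSSLVerificationException(
            "You have Verify SSL but the SSL Config is missing. "
            "Make sure to inform the CA Certificate path."
        )
    return ssl_config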

View File

@@ -103,7 +103,7 @@ class CliCommonDB:
self.get_profiler_time_partition_results()
)
if expected_profiler_time_partition_results:
-table_profile = profile.profile.dict()
+table_profile = profile.profile.model_dump()
for key in expected_profiler_time_partition_results["table_profile"]:
self.assertEqual(
table_profile[key],
@@ -122,7 +122,7 @@ class CliCommonDB:
None,
)
if expected_column_profile:
-column_profile = column.profile.dict()
+column_profile = column.profile.model_dump()
for key in expected_column_profile: # type: ignore
if key == "nonParametricSkew":
self.assertEqual(

View File

@@ -53,7 +53,7 @@ def ingest_mysql_service(
"sink": {"type": "metadata-rest", "config": {}},
"workflowConfig": {
"loggerLevel": LogLevels.DEBUG.value,
-"openMetadataServerConfig": metadata.config.dict(),
+"openMetadataServerConfig": metadata.config.model_dump(),
},
}
metadata_ingestion = MetadataWorkflow.create(workflow_config)
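
The test fixtures pick up the matching rename: .dict() becomes model_dump(). A sketch with a hypothetical server-config model standing in for metadata.config:

from pydantic import BaseModel

class ServerConfig(BaseModel):  # hypothetical stand-in for metadata.config
    hostPort: str = "http://localhost:8585/api"
    apiVersion: str = "v1"

workflow_config = {
    "workflowConfig": {"openMetadataServerConfig": ServerConfig().model_dump()}
}
assert workflow_config["workflowConfig"]["openMetadataServerConfig"]["apiVersion"] == "v1"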

View File

@@ -130,7 +130,7 @@ class OMetaChartTest(TestCase):
res_create = self.metadata.create_or_update(data=self.create)
-updated = self.create.dict(exclude_unset=True)
+updated = self.create.model_dump(exclude_unset=True)
updated["owner"] = self.owner
updated_entity = CreateChartRequest(**updated)

View File

@@ -130,7 +130,7 @@ class OMetaDashboardTest(TestCase):
res_create = self.metadata.create_or_update(data=self.create)
-updated = self.create.dict(exclude_unset=True)
+updated = self.create.model_dump(exclude_unset=True)
updated["owner"] = self.owner
updated_entity = CreateDashboardRequest(**updated)

View File

@@ -135,7 +135,7 @@ class OMetaDatabaseTest(TestCase):
res_create = self.metadata.create_or_update(data=self.create)
-updated = self.create.dict(exclude_unset=True)
+updated = self.create.model_dump(exclude_unset=True)
updated["owner"] = self.owner
updated_entity = CreateDatabaseRequest(**updated)

View File

@@ -162,7 +162,7 @@ class OMetaModelTest(TestCase):
res_create = self.metadata.create_or_update(data=self.create)
-updated = self.create.dict(exclude_unset=True)
+updated = self.create.model_dump(exclude_unset=True)
updated["owner"] = self.owner
updated_entity = CreateMlModelRequest(**updated)

View File

@@ -142,7 +142,7 @@ class OMetaPipelineTest(TestCase):
res_create = self.metadata.create_or_update(data=self.create)
-updated = self.create.dict(exclude_unset=True)
+updated = self.create.model_dump(exclude_unset=True)
updated["owner"] = self.owner
updated_entity = CreatePipelineRequest(**updated)

View File

@@ -243,7 +243,7 @@ class OMetaRolePolicyTest(TestCase):
res_create = self.metadata.create_or_update(data=self.create_policy)
-updated = self.create_policy.dict(exclude_unset=True)
+updated = self.create_policy.model_dump(exclude_unset=True)
updated["rules"] = [self.rule_3]
updated_policy_entity = CreatePolicyRequest(**updated)
@@ -463,7 +463,7 @@ class OMetaRolePolicyTest(TestCase):
res_create = self.metadata.create_or_update(data=self.create_role)
-updated = self.create_role.dict(exclude_unset=True)
+updated = self.create_role.model_dump(exclude_unset=True)
updated["policies"] = [self.role_policy_2.name]
updated_entity = CreateRoleRequest(**updated)

View File

@@ -129,7 +129,7 @@ class OMetaObjectStoreTest(TestCase):
res_create = self.metadata.create_or_update(data=self.create)
-updated = self.create.dict(exclude_unset=True)
+updated = self.create.model_dump(exclude_unset=True)
updated["owner"] = self.owner
updated_entity = CreateContainerRequest(**updated)

View File

@@ -231,7 +231,7 @@ class OMetaTableTest(TestCase):
res_create = self.metadata.create_or_update(data=self.create)
-updated = self.create.dict(exclude_unset=True)
+updated = self.create.model_dump(exclude_unset=True)
updated["owner"] = self.owner
updated_entity = CreateTableRequest(**updated)

View File

@@ -130,7 +130,7 @@ class OMetaTopicTest(TestCase):
res_create = self.metadata.create_or_update(data=self.create)
-updated = self.create.dict(exclude_unset=True)
+updated = self.create.model_dump(exclude_unset=True)
updated["owner"] = self.owner
updated_entity = CreateTopicRequest(**updated)

View File

@@ -133,7 +133,7 @@ def test_incompatible_column_type(ingest_metadata, metadata: OpenMetadata, db_se
"entityFullyQualifiedName": f"{db_service.fullyQualifiedName.root}.dvdrental.public.customer",
}
},
-"serviceConnection": db_service.connection.dict(),
+"serviceConnection": db_service.connection.model_dump(),
},
"processor": {
"type": "orm-test-runner",
@@ -166,7 +166,7 @@ def test_incompatible_column_type(ingest_metadata, metadata: OpenMetadata, db_se
},
"workflowConfig": {
"loggerLevel": "DEBUG",
-"openMetadataServerConfig": metadata.config.dict(),
+"openMetadataServerConfig": metadata.config.model_dump(),
},
}
test_suite_procesor = TestSuiteWorkflow.create(workflow_config)

View File

@@ -145,7 +145,7 @@ def test_ingest_query_log(db_service, ingest_metadata, metadata: OpenMetadata):
"sink": {"type": "metadata-rest", "config": {}},
"workflowConfig": {
"loggerLevel": "DEBUG",
-"openMetadataServerConfig": metadata.config.dict(),
+"openMetadataServerConfig": metadata.config.model_dump(),
},
}
metadata_ingestion = MetadataWorkflow.create(workflow_config)
@@ -199,7 +199,7 @@ def run_profiler_workflow(ingest_metadata, db_service, metadata):
loggerLevel=LogLevels.DEBUG, openMetadataServerConfig=metadata.config
),
)
-metadata_ingestion = ProfilerWorkflow.create(workflow_config.dict())
+metadata_ingestion = ProfilerWorkflow.create(workflow_config.model_dump())
search_cache.clear()
metadata_ingestion.execute()
return
@@ -212,7 +212,7 @@ def ingest_query_usage(ingest_metadata, db_service, metadata):
"source": {
"type": "postgres-usage",
"serviceName": db_service.fullyQualifiedName.root,
-"serviceConnection": db_service.connection.dict(),
+"serviceConnection": db_service.connection.model_dump(),
"sourceConfig": {
"config": {"type": DatabaseUsageConfigType.DatabaseUsage.value}
},
@@ -233,7 +233,7 @@ def ingest_query_usage(ingest_metadata, db_service, metadata):
"sink": {"type": "metadata-rest", "config": {}},
"workflowConfig": {
"loggerLevel": "DEBUG",
-"openMetadataServerConfig": metadata.config.dict(),
+"openMetadataServerConfig": metadata.config.model_dump(),
},
}
workflow = UsageWorkflow.create(workflow_config)
@@ -278,7 +278,7 @@ def run_usage_workflow(db_service, metadata):
"source": {
"type": "postgres-usage",
"serviceName": db_service.fullyQualifiedName.root,
-"serviceConnection": db_service.connection.dict(),
+"serviceConnection": db_service.connection.model_dump(),
"sourceConfig": {
"config": {"type": DatabaseUsageConfigType.DatabaseUsage.value}
},
@@ -299,7 +299,7 @@ def run_usage_workflow(db_service, metadata):
"sink": {"type": "metadata-rest", "config": {}},
"workflowConfig": {
"loggerLevel": "DEBUG",
-"openMetadataServerConfig": metadata.config.dict(),
+"openMetadataServerConfig": metadata.config.model_dump(),
},
}
workflow = UsageWorkflow.create(workflow_config)
@@ -317,7 +317,7 @@ def test_usage_delete_usage(db_service, ingest_postgres_lineage, metadata):
"source": {
"type": "postgres-usage",
"serviceName": db_service.fullyQualifiedName.root,
-"serviceConnection": db_service.connection.dict(),
+"serviceConnection": db_service.connection.model_dump(),
"sourceConfig": {
"config": {"type": DatabaseUsageConfigType.DatabaseUsage.value}
},
@@ -338,7 +338,7 @@ def test_usage_delete_usage(db_service, ingest_postgres_lineage, metadata):
"sink": {"type": "metadata-rest", "config": {}},
"workflowConfig": {
"loggerLevel": "DEBUG",
-"openMetadataServerConfig": metadata.config.dict(),
+"openMetadataServerConfig": metadata.config.model_dump(),
},
}
workflow = UsageWorkflow.create(workflow_config)

View File

@@ -73,7 +73,7 @@ def test_sample_data(db_service, db_fqn, metadata):
},
"workflowConfig": {
"loggerLevel": LogLevels.DEBUG,
-"openMetadataServerConfig": metadata.config.dict(),
+"openMetadataServerConfig": metadata.config.model_dump(),
},
}
profiler_workflow = ProfilerWorkflow.create(workflow_config)

View File

@@ -136,7 +136,7 @@ def ingest_metadata(mssql_container, metadata: OpenMetadata, request):
"sink": {"type": "metadata-rest", "config": {}},
"workflowConfig": {
"loggerLevel": "DEBUG",
-"openMetadataServerConfig": metadata.config.dict(),
+"openMetadataServerConfig": metadata.config.model_dump(),
},
}
metadata_ingestion = MetadataWorkflow.create(workflow_config)
@@ -183,7 +183,7 @@ def run_lineage_workflow(
"sink": {"type": "metadata-rest", "config": {}},
"workflowConfig": {
"loggerLevel": "INFO",
-"openMetadataServerConfig": metadata.config.dict(),
+"openMetadataServerConfig": metadata.config.model_dump(),
},
}
metadata_ingestion = MetadataWorkflow.create(workflow_config)

View File

@@ -194,7 +194,7 @@ class EntityReportProcessorTest(unittest.TestCase):
flat_result.timestamp = Timestamp(1695324826495)
processed.append(flat_result)
assert all(
-k in flat_result.data.dict()
+k in flat_result.data.model_dump()
for k in [
"entityType",
"entityTier",

View File

@@ -448,12 +448,12 @@ public class OpenMetadataApplication extends Application<OpenMetadataApplication
String maxMigration = Migration.lastMigrationFile(conf.getMigrationConfiguration());
if (lastMigrated.isEmpty()) {
throw new IllegalStateException(
-"Could not validate Flyway migrations in the database. Make sure you have run `./bootstrap/bootstrap_storage.sh migrate-all` at least once.");
+"Could not validate Flyway migrations in the database. Make sure you have run `./bootstrap/openmetadata-ops.sh migrate` at least once.");
}
if (lastMigrated.get().compareTo(maxMigration) < 0) {
throw new IllegalStateException(
"There are pending migrations to be run on the database."
-+ " Please backup your data and run `./bootstrap/bootstrap_storage.sh migrate-all`."
++ " Please backup your data and run `./bootstrap/openmetadata-ops.sh migrate`."
+ " You can find more information on upgrading OpenMetadata at"
+ " https://docs.open-metadata.org/deployment/upgrade ");
}

View File

@@ -30,7 +30,7 @@ public final class Migration {
} catch (StatementException e) {
throw new IllegalArgumentException(
"Exception encountered when trying to obtain last migrated Flyway version."
-+ " Make sure you have run `./bootstrap/bootstrap_storage.sh migrate-all` at least once.",
++ " Make sure you have run `./bootstrap/openmetadata-ops.sh migrate` at least once.",
e);
}
}

View File

@@ -64,7 +64,7 @@ public class MigrationWorkflow {
if (!migrations.isEmpty()) {
throw new IllegalStateException(
"There are pending migrations to be run on the database."
-+ " Please backup your data and run `./bootstrap/bootstrap_storage.sh migrate-all`."
++ " Please backup your data and run `./bootstrap/openmetadata-ops.sh migrate`."
+ " You can find more information on upgrading OpenMetadata at"
+ " https://docs.open-metadata.org/deployment/upgrade ");
}