mirror of
https://github.com/open-metadata/OpenMetadata.git
synced 2025-07-10 10:40:42 +00:00

* Add multiple owners * Multi Ownership * Issue #17012: Multi User/Team Ownership * Issue #17012: Multi User/Team Ownership * Issue #17012: Multi User/Team Ownership - Fix Tests - Part 1 * Issue #17012: Multi User/Team Ownership - Fix Tests - Part 2 * Issue #17012: Multi User/Team Ownership - Fix Tests - Part 3 * Issue #17012: Multi User/Team Ownership - Fix Tests - Part 4 * Issue #17012: Multi User/Team Ownership - Fix Tests - Part 5 * Issue #17012: Multi User/Team Ownership - Fix Tests - Part 6 * Issue #17012: Multi User/Team Ownership - Fix Tests - Part 7 * Issue #17012: Multi User/Team Ownership - Fix Tests - Part 8 * Add Migrations for Owner Thread * update ingestion for multi owner * fix pytests * fixed checkstyle * Add Alert Name to Publishers (#17108) * Add Alert Name to Publishers * Fix Test * Add Bound to Setuptools (#17105) * Minor: fixed testSummaryGraph issue (#17115) * feat: updated multi pipeline ui as per new mock (#17106) * feat: updated multi pipeline ui as per new mock * translation sync * fixed failing unit test * fixed playwright test * fixed viewService click issue * sorted pipeline based on test case length * Added domo federated dataset support (#17061) * fix usernames (#17122) * Doc: Updated Doris & Redshift Docs (#17123) Co-authored-by: Prajwal Pandit <prajwalpandit@Prajwals-MacBook-Air.local> * Fix #12677: Added Synapse Connector - docs and side docs (#17041) * Fix #17098: Fixed case sensitive partition column name in Bigquery (#17104) * Fixed case sensitive partiion col name bigquery * update test * #13876: change placement of comment and close button in task approval workflow (#17044) * change placment of comment and close button in task approval workflow * minor change * playwright test for the close and comment function * supported ref in activityFeedEditor * fix playwright test * added playwright test for data steward * fix the test for the data streward user * fix the close button not showing if task has no suggestions and icon 
fixes * fix sonar issue * change glossary and add suggestion button to dropdown button * fix the glossary failure due to button change * icon change for add tag and description * fix glossary cypress failure due to button chnages * changes as per comments * MINOR: docs links fix (#17125) * alation link fix * dbt yaml config source link fix * bigquery doc fix * Explore tree feedbacks (#17078) * fix explore design * update switcher icon * show menu when search query exists * fix selection of active service * fix type error * fix tests * fix tests * fix tests * MINOR: Databricks view TableType fix (#17124) * Minor: fixed AUT test (#17128) * Fix #16692: Override Lineage Support for View & Dashboard Lineage (#17064) * #17065: fix the tags not rendering in selector after selection in edit tags task (#17107) * fix the tags not rendering in selector after selection in edit tags taks * added playwright test * minor changes * minor fix * fix the tags not updating in edit and accept tag * fix explore type changes for collate (#17131) * MINOR: changed log level to debug (#17126) * changed log level to debug * fixed type * changed type to optional * Get feed and count data of soft deleted user (#17135) * Doc: Adding OIDC Docs (#17139) Co-authored-by: Prajwal Pandit <prajwalpandit@Prajwals-MacBook-Air.local> * Doc: Updating Profiler Workflow Docs URL (#17140) Co-authored-by: Prajwal Pandit <prajwalpandit@Prajwals-MacBook-Air.local> * fix playwright and cypress (#17138) * Minor: fixed edit modal issue for sql test case (#17132) * Minor: fixed edit modal issue for sql test case * fixed test * Minor: Added whats new content for 1.4.6 release (#17148) * MINOR [GEN-799]: add option to disable manual trigger using scheduleType (#17031) * fix: raise for triggering system app * added scheduleType ScheduledOrManual * minor: remove "service" field from required properties in createAPIEndpoint schema (#17147) * initial commit multi ownership * update glossary and other entities * update 
owners * fix version pages * fix tests * Update entity_extension to move owner to array (#17200) * fix tests * fix api page errors * fix owner label design * locales * fix owners in elastic search source * fix types * fix tests * fix tests * Updated CustomMetric owner to entityReferenceList. (#17211) * Fix owners field in search mappings * fix search aggregates * fix inherited label * Issue #17012: Multi User/Team Ownership - Fix Tests - Part 9 * Fix QUeries * Fix Mysql Queries * Typo * fix tests * fix tests * fix tests * fix advanced search constants * fix service ingestion tests * fix tests --------- Co-authored-by: mohitdeuex <mohit.y@deuexsolutions.com> Co-authored-by: Onkar Ravgan <onkar.10r@gmail.com> Co-authored-by: Mohit Yadav <105265192+mohityadav766@users.noreply.github.com> Co-authored-by: Ayush Shah <ayush@getcollate.io> Co-authored-by: Shailesh Parmar <shailesh.parmar.webdev@gmail.com> Co-authored-by: k.nakagaki <141020064+nakaken-churadata@users.noreply.github.com> Co-authored-by: Prajwal214 <167504578+Prajwal214@users.noreply.github.com> Co-authored-by: Prajwal Pandit <prajwalpandit@Prajwals-MacBook-Air.local> Co-authored-by: Suman Maharana <sumanmaharana786@gmail.com> Co-authored-by: Ashish Gupta <ashish@getcollate.io> Co-authored-by: harshsoni2024 <64592571+harshsoni2024@users.noreply.github.com> Co-authored-by: Karan Hotchandani <33024356+karanh37@users.noreply.github.com> Co-authored-by: Mayur Singal <39544459+ulixius9@users.noreply.github.com> Co-authored-by: Imri Paran <imri.paran@gmail.com> Co-authored-by: sonika-shah <58761340+sonika-shah@users.noreply.github.com> Co-authored-by: Sachin Chaurasiya <sachinchaurasiyachotey87@gmail.com> Co-authored-by: karanh37 <karanh37@gmail.com> Co-authored-by: Siddhant <86899184+Siddhanttimeline@users.noreply.github.com>
370 lines
12 KiB
Python
370 lines
12 KiB
Python
# Copyright 2021 Collate
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
# you may not use this file except in compliance with the License.
|
|
# You may obtain a copy of the License at
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
# See the License for the specific language governing permissions and
|
|
# limitations under the License.
|
|
|
|
"""
|
|
Test databricks using the topology
|
|
"""
|
|
|
|
from unittest import TestCase
|
|
from unittest.mock import patch
|
|
|
|
from metadata.generated.schema.api.data.createDatabaseSchema import (
|
|
CreateDatabaseSchemaRequest,
|
|
)
|
|
from metadata.generated.schema.api.data.createTable import CreateTableRequest
|
|
from metadata.generated.schema.entity.data.database import Database
|
|
from metadata.generated.schema.entity.data.databaseSchema import DatabaseSchema
|
|
from metadata.generated.schema.entity.data.table import Column, DataType, TableType
|
|
from metadata.generated.schema.entity.services.databaseService import (
|
|
DatabaseConnection,
|
|
DatabaseService,
|
|
DatabaseServiceType,
|
|
)
|
|
from metadata.generated.schema.metadataIngestion.workflow import (
|
|
OpenMetadataWorkflowConfig,
|
|
)
|
|
from metadata.generated.schema.type.basic import FullyQualifiedEntityName
|
|
from metadata.generated.schema.type.entityReference import EntityReference
|
|
from metadata.ingestion.ometa.utils import model_str
|
|
from metadata.ingestion.source.database.databricks.metadata import DatabricksSource
|
|
|
|
# pylint: disable=line-too-long
# Minimal OpenMetadata workflow configuration used to build the DatabricksSource
# under test. The JWT below is a dummy/test token, not a live credential.
mock_databricks_config = {
    "source": {
        "type": "databricks",
        "serviceName": "local_datalake",
        "serviceConnection": {
            "config": {
                "type": "Databricks",
                # Catalog/schema drive the expected database and schema names
                # asserted in the tests below.
                "catalog": "hive_metastore",
                "databaseSchema": "default",
                "token": "123sawdtesttoken",
                "hostPort": "localhost:443",
                "connectionArguments": {"http_path": "/sql/1.0/warehouses/abcdedfg"},
            }
        },
        "sourceConfig": {
            "config": {
                "type": "DatabaseMetadata",
                # No schema exclusions: every schema passes the filter.
                "schemaFilterPattern": {"excludes": []},
            }
        },
    },
    "sink": {"type": "metadata-rest", "config": {}},
    "workflowConfig": {
        "openMetadataServerConfig": {
            "hostPort": "http://localhost:8585/api",
            "authProvider": "openmetadata",
            "securityConfig": {
                "jwtToken": "eyJraWQiOiJHYjM4OWEtOWY3Ni1nZGpzLWE5MmotMDI0MmJrOTQzNTYiLCJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJhZG1pbiIsImlzQm90IjpmYWxzZSwiaXNzIjoib3Blbi1tZXRhZGF0YS5vcmciLCJpYXQiOjE2NjM5Mzg0NjIsImVtYWlsIjoiYWRtaW5Ab3Blbm1ldGFkYXRhLm9yZyJ9.tS8um_5DKu7HgzGBzS1VTA5uUjKWOCU0B_j08WXBiEC0mr0zNREkqVfwFDD-d24HlNEbrqioLsBuFRiwIWKc1m_ZlVQbG7P36RUxhuv2vbSp80FKyNM-Tj93FDzq91jsyNmsQhyNv_fNr3TXfzzSPjHt8Go0FMMP66weoKMgW2PbXlhVKwEuXUHyakLLzewm9UMeQaEiRzhiTMU3UkLXcKbYEJJvfNFcLwSl9W8JCO_l0Yj3ud-qt_nQYEZwqW6u5nfdQllN133iikV4fM5QZsMCnm8Rq1mvLR0y9bmJiD7fwM1tmJ791TUWqmKaTnP49U493VanKpUAfzIiOiIbhg"
            },
        }
    },
}
|
|
|
|
|
|
# Raw table payload, as returned by the mocked Databricks/API layer, used by
# test_yield_table (looked up via its "id").
MOCK_TABLE = {
    "id": "2d725b6e-1588-4814-9d8b-eff384cd1053",
    "name": "DataSet Input",
    "description": "this is a description for dataset input",
    "rows": 99,
    "columns": 10,
    "schema": {
        "columns": [
            {"type": "DOUBLE", "name": "amount"},
            {"type": "DOUBLE", "name": "bank_transfer_amount"},
            {"type": "DOUBLE", "name": "coupon_amount"},
            {"type": "DOUBLE", "name": "credit_card_amount"},
        ]
    },
    # Single "owner" key mirrors the upstream payload shape; the expected
    # CreateTableRequest below carries owners=None.
    "owner": {"id": 1027954122, "name": "Nihar Doshi"},
    "dataCurrentAt": "2022-10-18T05:30:06Z",
    "createdAt": "2022-10-17T05:52:21Z",
    "updatedAt": "2022-10-18T05:30:07Z",
    "pdpEnabled": False,
    "policies": [
        {
            "id": 17,
            "type": "open",
            "name": "All Rows",
            "filters": [],
            "users": [],
            "virtualUsers": [],
            "groups": [],
        }
    ],
}
|
|
|
|
# Second raw table payload, used by test_yield_table_2 (looked up via its "id").
MOCK_TABLE_2 = {
    "id": "3df43ed7-5f2f-46bb-9793-384c6374a81d",
    "name": "growth data",
    "description": "company growth data",
    "rows": 5,
    "columns": 2,
    "schema": {
        "columns": [
            # Dotted column name exercises name handling for nested fields.
            {"type": "ARRAY", "name": "quarters.result"},
            {"type": "NUMBER", "name": "profit"},
        ]
    },
    "owner": {"id": 6024954162, "name": "Sam"},
    "dataCurrentAt": "2024-07-15T05:30:06Z",
    "createdAt": "2024-07-15T05:52:21Z",
    "updatedAt": "2024-07-15T05:30:07Z",
}
|
|
|
|
# Expected CreateTableRequest produced from MOCK_TABLE_2.
# NOTE(review): "EXPTECTED" is a typo for "EXPECTED"; kept because the test
# methods below reference this exact name.
EXPTECTED_TABLE_2 = [
    CreateTableRequest(
        name="growth data",
        displayName="growth data",
        description="company growth data",
        tableType=TableType.Regular.value,
        columns=[
            Column(
                name="quarters.result",
                dataType=DataType.ARRAY.value,
            ),
            Column(
                name="profit",
                dataType=DataType.NUMBER.value,
            ),
        ],
        databaseSchema=FullyQualifiedEntityName(
            "local_databricks.hive_metastore.do_it_all_with_default_schema"
        ),
    )
]
|
|
|
|
# Database / schema names the filtered topology is expected to yield,
# matching the "catalog" and "databaseSchema" values in mock_databricks_config.
EXPECTED_DATABASE_NAMES = ["hive_metastore"]

EXPECTED_DATABASE_SCHEMA_NAMES = ["default"]
|
|
|
|
# Database service entity seeded into the source's topology context in
# DatabricksUnitTest.__init__ (only its .name is consumed there).
MOCK_DATABASE_SERVICE = DatabaseService(
    id="85811038-099a-11ed-861d-0242ac120002",
    name="local_databricks",
    connection=DatabaseConnection(),
    serviceType=DatabaseServiceType.Databricks,
)
|
|
|
|
# Database entity seeded into the source's topology context
# (only its .name is consumed by the tests).
MOCK_DATABASE = Database(
    id="a4e2f4aa-10af-4d4b-a85b-5daad6f70720",
    name="hive_metastore",
    fullyQualifiedName="local_databricks.hive_metastore",
    displayName="hive_metastore",
    description="",
    service=EntityReference(
        id="85811038-099a-11ed-861d-0242ac120002", type="databaseService"
    ),
)
|
|
|
|
# Schema entity seeded into the topology context; the tests only read .name.
# NOTE(review): the fullyQualifiedName references "domodatabase_source" and the
# "service" reference reuses this schema's own id with type="database" — both
# look copy-pasted from another test fixture; harmless here since only the
# name is used, but worth confirming/cleaning up.
MOCK_DATABASE_SCHEMA = DatabaseSchema(
    id="ec5be98f-917c-44be-b178-47b3237ef648",
    name="do_it_all_with_default_schema",
    fullyQualifiedName="domodatabase_source.do_it_all_with_default_config.do_it_all_with_default_schema",
    service=EntityReference(id="ec5be98f-917c-44be-b178-47b3237ef648", type="database"),
    database=EntityReference(
        id="a4e2f4aa-10af-4d4b-a85b-5daad6f70720",
        type="database",
    ),
)
|
|
|
|
|
|
# Expected CreateDatabaseSchemaRequest produced by yield_database_schema.
# NOTE(review): "EXPTECTED" is a typo for "EXPECTED"; kept because
# test_yield_schema references this exact name.
EXPTECTED_DATABASE_SCHEMA = [
    CreateDatabaseSchemaRequest(
        name="do_it_all_with_default_schema",
        displayName=None,
        description=None,
        owners=None,
        database="local_databricks.hive_metastore",
    )
]
|
|
|
|
|
|
# Expected CreateTableRequest produced from MOCK_TABLE; all optional fields are
# spelled out explicitly (as None) so the comparison in test_yield_table is
# exhaustive over the request model.
# NOTE(review): "EXPTECTED" is a typo for "EXPECTED"; kept because
# test_yield_table references this exact name. The name "newtable" differs from
# MOCK_TABLE's "DataSet Input" — presumably the source derives the table name
# elsewhere; confirm against DatabricksSource.yield_table.
EXPTECTED_TABLE = [
    CreateTableRequest(
        name="newtable",
        displayName="newtable",
        description="this is a description for dataset input",
        tableType=TableType.Regular.value,
        columns=[
            Column(
                name="amount",
                displayName=None,
                dataType=DataType.DOUBLE.value,
                arrayDataType=None,
                dataLength=None,
                precision=None,
                scale=None,
                dataTypeDisplay=None,
                description="",
                fullyQualifiedName=None,
                tags=None,
                constraint=None,
                ordinalPosition=1,
                jsonSchema=None,
                children=None,
                customMetrics=None,
                profile=None,
            ),
            Column(
                name="bank_transfer_amount",
                displayName=None,
                dataType=DataType.DOUBLE.value,
                arrayDataType=None,
                dataLength=None,
                precision=None,
                scale=None,
                dataTypeDisplay=None,
                description="",
                fullyQualifiedName=None,
                tags=None,
                constraint=None,
                ordinalPosition=2,
                jsonSchema=None,
                children=None,
                customMetrics=None,
                profile=None,
            ),
            Column(
                name="coupon_amount",
                displayName=None,
                dataType=DataType.DOUBLE.value,
                arrayDataType=None,
                dataLength=None,
                precision=None,
                scale=None,
                dataTypeDisplay=None,
                description="",
                fullyQualifiedName=None,
                tags=None,
                constraint=None,
                ordinalPosition=3,
                jsonSchema=None,
                children=None,
                customMetrics=None,
                profile=None,
            ),
            Column(
                name="credit_card_amount",
                displayName=None,
                dataType=DataType.DOUBLE.value,
                arrayDataType=None,
                dataLength=None,
                precision=None,
                scale=None,
                dataTypeDisplay=None,
                description="",
                fullyQualifiedName=None,
                tags=None,
                constraint=None,
                ordinalPosition=4,
                jsonSchema=None,
                children=None,
                customMetrics=None,
                profile=None,
            ),
        ],
        tableConstraints=None,
        tablePartition=None,
        tableProfilerConfig=None,
        owners=None,
        databaseSchema=FullyQualifiedEntityName(
            "local_databricks.hive_metastore.do_it_all_with_default_schema"
        ),
        tags=None,
        schemaDefinition=None,
        extension=None,
    )
]
|
|
|
|
|
|
class DatabricksUnitTest(TestCase):
    """
    Databricks unit tests.

    Builds a DatabricksSource from ``mock_databricks_config`` (with the
    connection test and version probe patched out) and checks that the
    topology yields the expected schema and table create requests.
    """

    @patch(
        "metadata.ingestion.source.database.common_db_source.CommonDbSourceService.test_connection"
    )
    @patch(
        "metadata.ingestion.source.database.databricks.metadata.DatabricksSource._init_version"
    )
    def __init__(self, methodName, test_connection, db_init_version) -> None:
        super().__init__(methodName)
        # Neutralize network-touching calls made during source creation.
        test_connection.return_value = False
        db_init_version.return_value = None

        self.config = OpenMetadataWorkflowConfig.model_validate(mock_databricks_config)
        self.databricks_source = DatabricksSource.create(
            mock_databricks_config["source"],
            self.config.workflowConfig.openMetadataServerConfig,
        )
        # Pre-seed the topology context with the parent entities the yield_*
        # methods expect to already exist when processing schemas/tables.
        context = self.databricks_source.context.get()
        context.__dict__["database"] = MOCK_DATABASE.name.root
        context.__dict__["database_service"] = MOCK_DATABASE_SERVICE.name.root
        context.__dict__["database_schema"] = MOCK_DATABASE_SCHEMA.name.root

    def test_database_schema_names(self):
        """Filtered schema names should match the configured databaseSchema."""
        self.assertEqual(
            EXPECTED_DATABASE_SCHEMA_NAMES,
            list(self.databricks_source.get_database_schema_names()),
        )

    def test_raw_database_schema_names(self):
        """Unfiltered schema names should also match in this fixture."""
        self.assertEqual(
            EXPECTED_DATABASE_SCHEMA_NAMES,
            list(self.databricks_source.get_raw_database_schema_names()),
        )

    def test_yield_schema(self):
        """yield_database_schema should produce the expected create request."""
        schema_list = [
            either
            for either in self.databricks_source.yield_database_schema(
                schema_name=model_str(MOCK_DATABASE_SCHEMA.name)
            )
            if isinstance(either, CreateDatabaseSchemaRequest)
        ]
        # Compare whole lists: a length mismatch (including nothing yielded)
        # fails here, whereas the previous zip()-based loop passed vacuously.
        self.assertEqual(EXPTECTED_DATABASE_SCHEMA, schema_list)

    def test_yield_table(self):
        """yield_table for MOCK_TABLE's id should match EXPTECTED_TABLE."""
        table_list = [
            either
            for either in self.databricks_source.yield_table(
                ("2d725b6e-1588-4814-9d8b-eff384cd1053", "Regular")
            )
            if isinstance(either, CreateTableRequest)
        ]
        self.assertEqual(EXPTECTED_TABLE, table_list)

    def test_yield_table_2(self):
        """yield_table for MOCK_TABLE_2's id should match EXPTECTED_TABLE_2."""
        table_list = [
            either
            for either in self.databricks_source.yield_table(
                ("3df43ed7-5f2f-46bb-9793-384c6374a81d", "Regular")
            )
            if isinstance(either, CreateTableRequest)
        ]
        self.assertEqual(EXPTECTED_TABLE_2, table_list)