# Copyright 2025 Collate
# Licensed under the Collate Community License, Version 1.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://github.com/open-metadata/OpenMetadata/blob/main/ingestion/LICENSE
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test Flink using the topology
"""
from unittest import TestCase
from unittest.mock import patch

from metadata.generated.schema.api.data.createPipeline import CreatePipelineRequest
from metadata.generated.schema.entity.services.pipelineService import (
    PipelineConnection,
    PipelineService,
    PipelineServiceType,
)
from metadata.generated.schema.metadataIngestion.workflow import (
    OpenMetadataWorkflowConfig,
)
from metadata.generated.schema.type.basic import (
    EntityName,
    FullyQualifiedEntityName,
    SourceUrl,
)
from metadata.ingestion.source.pipeline.flink.metadata import FlinkSource
from metadata.ingestion.source.pipeline.flink.models import FlinkPipeline
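
# Workflow configuration used to build the FlinkSource. Both the Flink REST
# endpoint and the OpenMetadata server are local placeholders, and the JWT is
# a sample test token, so no live services are required.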
mock_flink_config = {
    "source": {
        "type": "flink",
        "serviceName": "flink_test",
        "serviceConnection": {
            "config": {"type": "Flink", "hostPort": "http://127.0.0.1:8081"}
        },
        "sourceConfig": {"config": {"type": "PipelineMetadata"}},
    },
    "sink": {"type": "metadata-rest", "config": {}},
    "workflowConfig": {
        "openMetadataServerConfig": {
            "hostPort": "http://localhost:8585/api",
            "authProvider": "openmetadata",
            "securityConfig": {
                "jwtToken": "eyJraWQiOiJHYjM4OWEtOWY3Ni1nZGpzLWE5MmotMDI0MmJrOTQzNTYiLCJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJhZG1pbiIsImlzQm90IjpmYWxzZSwiaXNzIjoib3Blbi1tZXRhZGF0YS5vcmciLCJpYXQiOjE2NjM5Mzg0NjIsImVtYWlsIjoiYWRtaW5Ab3Blbm1ldGFkYXRhLm9yZyJ9.tS8um_5DKu7HgzGBzS1VTA5uUjKWOCU0B_j08WXBiEC0mr0zNREkqVfwFDD-d24HlNEbrqioLsBuFRiwIWKc1m_ZlVQbG7P36RUxhuv2vbSp80FKyNM-Tj93FDzq91jsyNmsQhyNv_fNr3TXfzzSPjHt8Go0FMMP66weoKMgW2PbXlhVKwEuXUHyakLLzewm9UMeQaEiRzhiTMU3UkLXcKbYEJJvfNFcLwSl9W8JCO_l0Yj3ud-qt_nQYEZwqW6u5nfdQllN133iikV4fM5QZsMCnm8Rq1mvLR0y9bmJiD7fwM1tmJ791TUWqmKaTnP49U493VanKpUAfzIiOiIbhg"
            },
        }
    },
}
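
# Mocked PipelineService entity; its name seeds the topology context in the tests.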
MOCK_PIPELINE_SERVICE = PipelineService(
    id="85811038-099a-11ed-861d-0242ac120002",
    name="flink_test",
    fullyQualifiedName=FullyQualifiedEntityName("flink_test"),
    connection=PipelineConnection(),
    serviceType=PipelineServiceType.Flink,
)
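
# Mocked FlinkPipeline model fed to the source's get_pipeline_name and yield_pipeline.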
MOCK_PIPELINE = FlinkPipeline(
    jid="2aaa012e-099a-11ed-861d-0242ac120002",
    name="alphabet",
    state="Completed",
    start_time=1718948457617,
    end_time=1718948457663,
)
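
# Raw job payload mirroring the shape of a Flink REST API jobs response
# (timestamps are epoch milliseconds).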
MOCK_JOB = {
    "jid": "14f093794bf421e732aa6750d15ce5a6",
    "name": "WordCount",
    "start-time": 1718926965658,
    "end-time": 1718926965903,
    "duration": 245,
    "state": "FINISHED",
    "last-modification": 1718926965903,
    "tasks": {
        "running": 0,
        "canceling": 0,
        "canceled": 0,
        "total": 2,
        "created": 0,
        "scheduled": 0,
        "deploying": 0,
        "reconciling": 0,
        "finished": 2,
        "initializing": 0,
        "failed": 0,
    },
}
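
# Expected results: the pipeline name and the CreatePipelineRequest built from
# MOCK_PIPELINE. The sourceUrl embeds a MagicMock repr because get_connection is
# patched; test_pipelines copies the generated sourceUrl before comparing.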
EXPECTED_PIPELINE_NAME = "alphabet"
EXPECTED_PIPELINE = [
    CreatePipelineRequest(
        name=EntityName(root="alphabet"),
        displayName=None,
        description=None,
        dataProducts=None,
        sourceUrl=SourceUrl(
            root="<MagicMock name='get_connection().config.hostPort' id='4883082864'>/#/job/running/None/overview"
        ),
        concurrency=None,
        pipelineLocation=None,
        startDate=None,
        tasks=[],
        tags=None,
        owners=None,
        service=FullyQualifiedEntityName(root="flink_test"),
        extension=None,
        scheduleInterval=None,
        domain=None,
        lifeCycle=None,
        sourceHash=None,
    )
]
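

# The test class patches the connectivity check and the Flink client factory so
# the FlinkSource can be instantiated entirely from the mock configuration above.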
class FlinkUnitTest(TestCase):
    @patch(
        "metadata.ingestion.source.pipeline.pipeline_service.PipelineServiceSource.test_connection"
    )
    @patch("metadata.ingestion.source.pipeline.flink.connection.get_connection")
    def __init__(self, methodName, flink_client, test_connection) -> None:
        super().__init__(methodName)
        test_connection.return_value = False
        config = OpenMetadataWorkflowConfig.model_validate(mock_flink_config)
        self.flink = FlinkSource.create(
            mock_flink_config["source"],
            config.workflowConfig.openMetadataServerConfig,
        )
        # Pre-populate the topology context with the mocked pipeline and service names
        self.flink.context.get().__dict__["pipeline"] = MOCK_PIPELINE.name
        self.flink.context.get().__dict__[
            "pipeline_service"
        ] = MOCK_PIPELINE_SERVICE.name.root
        self.client = flink_client.return_value

    def test_pipeline_name(self):
        assert self.flink.get_pipeline_name(MOCK_PIPELINE) == EXPECTED_PIPELINE_NAME

    def test_pipelines(self):
        pipelines_list = []
        results = self.flink.yield_pipeline(MOCK_PIPELINE)
        for result in results:
            pipelines_list.append(result.right)

        for _, (expected, original) in enumerate(
            zip(EXPECTED_PIPELINE, pipelines_list)
        ):
            # The expected sourceUrl depends on the mocked hostPort, so copy the
            # generated value before asserting equality
            expected.sourceUrl = original.sourceUrl
            self.assertEqual(expected, original)