Mirror of https://github.com/open-metadata/OpenMetadata.git, synced 2025-10-31 18:48:35 +00:00
	Fixes(ingestion/source/dbt): Handle None Type in get_tag_labels Function for DBT Metadata Processing (#16648)
* fix condition
* fix
* lint
This commit is contained in:
parent de9822e7f3
commit fc9033b953
@@ -47,6 +47,7 @@ from metadata.utils.logger import utils_logger
 
 logger = utils_logger()
 
+
 # Similar inner methods with mode client. That's fine.
 # pylint: disable=duplicate-code
 class PowerBiApiClient:

@@ -394,11 +394,14 @@ class DbtSource(DbtServiceSource):
 
                     dbt_table_tags_list = []
                     if manifest_node.tags:
-                        dbt_table_tags_list = get_tag_labels(
-                            metadata=self.metadata,
-                            tags=manifest_node.tags,
-                            classification_name=self.tag_classification_name,
-                            include_tags=self.source_config.includeTags,
+                        dbt_table_tags_list = (
+                            get_tag_labels(
+                                metadata=self.metadata,
+                                tags=manifest_node.tags,
+                                classification_name=self.tag_classification_name,
+                                include_tags=self.source_config.includeTags,
+                            )
+                            or []
                         )
 
                     if manifest_node.meta:

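The dbt hunk above is the substantive change: get_tag_labels can return None (for instance when tag ingestion is disabled or no labels resolve), so assigning its result directly can leave dbt_table_tags_list set to None and break later iteration or concatenation. A minimal sketch of the guard pattern, using a hypothetical fetch_tag_labels stand-in rather than the real OpenMetadata helper:

    from typing import List, Optional


    def fetch_tag_labels(tags: List[str], include_tags: bool) -> Optional[List[str]]:
        """Hypothetical stand-in for get_tag_labels: returns None when tag
        ingestion is disabled, mirroring the case the commit guards against."""
        if not include_tags:
            return None
        return [f"dbtTags.{tag}" for tag in tags]


    # Without the `or []` guard, table_tags could be None and the loop below
    # would raise: TypeError: 'NoneType' object is not iterable
    table_tags = fetch_tag_labels(["finance", "pii"], include_tags=False) or []

    for label in table_tags:  # always a list, possibly empty
        print(label)

The remaining hunks are formatting cleanups (blank lines added or removed) picked up by the lint pass.
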
@@ -30,7 +30,6 @@ class KafkaSource(CommonBrokerSource):
         self.ssl_manager = None
         service_connection = cast(KafkaConnection, config.serviceConnection.root.config)
         if service_connection.schemaRegistrySSL:
-
             self.ssl_manager = SSLManager(
                 ca=service_connection.schemaRegistrySSL.root.caCertificate,
                 key=service_connection.schemaRegistrySSL.root.sslKey,

@@ -99,7 +99,6 @@ class DatabrickspipelineSource(PipelineServiceSource):
         """Method to Get Pipeline Entity"""
         self.context.get().job_id_list = []
         try:
-
             description = pipeline_details["settings"].get("name")
             pipeline_request = CreatePipelineRequest(
                 name=EntityName(str(pipeline_details["job_id"])),

@@ -201,7 +201,6 @@ class KafkaconnectSource(PipelineServiceSource):
             dataset_entity = self.get_dataset_entity(pipeline_details=pipeline_details)
 
             for topic in pipeline_details.topics or []:
-
                 topic_fqn = fqn.build(
                     metadata=self.metadata,
                     entity_type=Topic,

@@ -70,7 +70,6 @@ def get_docker_network(name: str):
 
 
 def get_mysql_container(mysql_config: MySqlContainerConfigs):
-
     container = MySqlContainer(
         **{
             k: v

@@ -113,7 +113,6 @@ def create_data(mlflow_environment):
 
         # Model registry does not work with file store
         if tracking_url_type_store != "file":
-
             # Register the model
             # There are other ways to use the Model Registry, which depends on the use case,
             # please refer to the doc for more information: