diff --git a/bootstrap/sql/com.mysql.cj.jdbc.Driver/v012__create_db_connection_info.sql b/bootstrap/sql/com.mysql.cj.jdbc.Driver/v012__create_db_connection_info.sql
index 946bdca8a51..434ae754f33 100644
--- a/bootstrap/sql/com.mysql.cj.jdbc.Driver/v012__create_db_connection_info.sql
+++ b/bootstrap/sql/com.mysql.cj.jdbc.Driver/v012__create_db_connection_info.sql
@@ -3,6 +3,7 @@ UPDATE metadata_service_entity
 SET json = JSON_REMOVE(json, '$.openMetadataServerConnection.secretsManagerCredentials')
 where name = 'OpenMetadata';
+
 -- Rename githubCredentials to gitCredentials
 UPDATE dashboard_service_entity
 SET json = JSON_INSERT(
@@ -12,3 +13,30 @@ SET json = JSON_INSERT(
 )
 WHERE serviceType = 'Looker'
   AND JSON_EXTRACT(json, '$.connection.config.githubCredentials') IS NOT NULL;
+
+
+-- Rename gcsConfig in BigQuery to gcpConfig
+UPDATE dbservice_entity
+SET json = JSON_INSERT(
+    JSON_REMOVE(json, '$.connection.config.credentials.gcsConfig'),
+    '$.connection.config.credentials.gcpConfig',
+    JSON_EXTRACT(json, '$.connection.config.credentials.gcsConfig')
+) where serviceType in ('BigQuery');
+
+-- Rename gcsConfig in Datalake to gcpConfig
+UPDATE dbservice_entity
+SET json = JSON_INSERT(
+    JSON_REMOVE(json, '$.connection.config.configSource.securityConfig.gcsConfig'),
+    '$.connection.config.configSource.securityConfig.gcpConfig',
+    JSON_EXTRACT(json, '$.connection.config.configSource.securityConfig.gcsConfig')
+) where serviceType in ('Datalake');
+
+
+-- Rename gcsConfig in dbt to gcpConfig
+UPDATE ingestion_pipeline_entity
+SET json = JSON_INSERT(
+    JSON_REMOVE(json, '$.sourceConfig.config.dbtConfigSource.dbtSecurityConfig.gcsConfig'),
+    '$.sourceConfig.config.dbtConfigSource.dbtSecurityConfig.gcpConfig',
+    JSON_EXTRACT(json, '$.sourceConfig.config.dbtConfigSource.dbtSecurityConfig.gcsConfig')
+)
+WHERE json -> '$.sourceConfig.config.type' = 'DBT';
diff --git a/bootstrap/sql/org.postgresql.Driver/v012__create_db_connection_info.sql b/bootstrap/sql/org.postgresql.Driver/v012__create_db_connection_info.sql
index f3516e5129b..6cc3072cdb4 100644
--- a/bootstrap/sql/org.postgresql.Driver/v012__create_db_connection_info.sql
+++ b/bootstrap/sql/org.postgresql.Driver/v012__create_db_connection_info.sql
@@ -3,8 +3,31 @@ UPDATE metadata_service_entity
 SET json = json::jsonb #- '{openMetadataServerConnection.secretsManagerCredentials}'
 where name = 'OpenMetadata';
+
 -- Rename githubCredentials to gitCredentials
 UPDATE dashboard_service_entity
 SET json = jsonb_set(json, '{connection,config,gitCredentials}', json#>'{connection,config,githubCredentials}')
 where serviceType = 'Looker'
-  and json#>'{connection,config,githubCredentials}' is not null;
\ No newline at end of file
+  and json#>'{connection,config,githubCredentials}' is not null;
+
+
+-- Rename gcsConfig in BigQuery to gcpConfig
+UPDATE dbservice_entity
+SET json = jsonb_set(json, '{connection,config,credentials,gcpConfig}',
+json#>'{connection,config,credentials,gcsConfig}')
+where serviceType in ('BigQuery')
+  and json#>'{connection,config,credentials,gcsConfig}' is not null;
+
+
+-- Rename gcsConfig in Datalake to gcpConfig
+UPDATE dbservice_entity
+SET json = jsonb_set(json, '{connection,config,configSource,securityConfig,gcpConfig}',
+json#>'{connection,config,configSource,securityConfig,gcsConfig}')
+where serviceType in ('Datalake')
+  and json#>'{connection,config,configSource,securityConfig,gcsConfig}' is not null;
+
+
+-- Rename gcsConfig in dbt to gcpConfig
+UPDATE ingestion_pipeline_entity
+SET json = jsonb_set(json::jsonb #-
'{sourceConfig,config,dbtConfigSource,dbtSecurityConfig,gcsConfig}', '{sourceConfig,config,dbtConfigSource,dbtSecurityConfig,gcpConfig}', (json#>'{sourceConfig,config,dbtConfigSource,dbtSecurityConfig,gcsConfig}')::jsonb) +WHERE json#>>'{sourceConfig,config,dbtConfigSource,dbtSecurityConfig}' is not null and json#>>'{sourceConfig,config,dbtConfigSource,dbtSecurityConfig,gcsConfig}' is not null; diff --git a/ingestion/examples/sample_data/datasets/service.json b/ingestion/examples/sample_data/datasets/service.json index 919106497a7..644b740ce3b 100644 --- a/ingestion/examples/sample_data/datasets/service.json +++ b/ingestion/examples/sample_data/datasets/service.json @@ -6,7 +6,7 @@ "type": "BigQuery", "hostPort": "localhost:1234", "credentials": { - "gcsConfig": { + "gcpConfig": { "type": "service_account", "projectId": ["projectID"], "privateKeyId": "privateKeyId", diff --git a/ingestion/src/metadata/examples/workflows/bigquery.yaml b/ingestion/src/metadata/examples/workflows/bigquery.yaml index d05ec4b5fe7..fc0b66b9dd4 100644 --- a/ingestion/src/metadata/examples/workflows/bigquery.yaml +++ b/ingestion/src/metadata/examples/workflows/bigquery.yaml @@ -6,7 +6,7 @@ source: type: BigQuery taxonomyProjectID: [ project-id-where-policy-tags-exist ] credentials: - gcsConfig: + gcpConfig: type: service_account projectId: project_id privateKeyId: private_key_id diff --git a/ingestion/src/metadata/examples/workflows/bigquery_profiler.yaml b/ingestion/src/metadata/examples/workflows/bigquery_profiler.yaml index 3005270938f..707f9b7254e 100644 --- a/ingestion/src/metadata/examples/workflows/bigquery_profiler.yaml +++ b/ingestion/src/metadata/examples/workflows/bigquery_profiler.yaml @@ -5,7 +5,7 @@ source: config: type: BigQuery credentials: - gcsConfig: + gcpConfig: type: service_account projectId: my-project-id-1234 privateKeyId: privateKeyID diff --git a/ingestion/src/metadata/examples/workflows/bigquery_usage.yaml b/ingestion/src/metadata/examples/workflows/bigquery_usage.yaml index ec0474315b8..5f0c92293e3 100644 --- a/ingestion/src/metadata/examples/workflows/bigquery_usage.yaml +++ b/ingestion/src/metadata/examples/workflows/bigquery_usage.yaml @@ -5,7 +5,7 @@ source: config: type: BigQuery credentials: - gcsConfig: + gcpConfig: type: service_account projectId: project_id privateKeyId: private_key_id diff --git a/ingestion/src/metadata/examples/workflows/dbt.yaml b/ingestion/src/metadata/examples/workflows/dbt.yaml index ef4ab356fc0..e1fe2de0792 100644 --- a/ingestion/src/metadata/examples/workflows/dbt.yaml +++ b/ingestion/src/metadata/examples/workflows/dbt.yaml @@ -29,7 +29,7 @@ source: # dbtObjectPrefix: "dbt/" # # For GCS # dbtSecurityConfig: # These are modeled after all GCS credentials - # gcsConfig: + # gcpConfig: # type: My Type # projectId: project ID # privateKeyId: us-east-2 diff --git a/ingestion/src/metadata/ingestion/source/database/bigquery/connection.py b/ingestion/src/metadata/ingestion/source/database/bigquery/connection.py index 6cd7afff9bf..88538b5dbfa 100644 --- a/ingestion/src/metadata/ingestion/source/database/bigquery/connection.py +++ b/ingestion/src/metadata/ingestion/source/database/bigquery/connection.py @@ -26,8 +26,8 @@ from metadata.generated.schema.entity.automations.workflow import ( from metadata.generated.schema.entity.services.connections.database.bigQueryConnection import ( BigQueryConnection, ) -from metadata.generated.schema.security.credentials.gcsValues import ( - GcsCredentialsValues, +from metadata.generated.schema.security.credentials.gcpValues 
import ( + GcpCredentialsValues, MultipleProjectId, SingleProjectId, ) @@ -52,22 +52,22 @@ def get_connection_url(connection: BigQueryConnection) -> str: environment variable when needed """ - if isinstance(connection.credentials.gcsConfig, GcsCredentialsValues): + if isinstance(connection.credentials.gcpConfig, GcpCredentialsValues): if isinstance( # pylint: disable=no-else-return - connection.credentials.gcsConfig.projectId, SingleProjectId + connection.credentials.gcpConfig.projectId, SingleProjectId ): - if not connection.credentials.gcsConfig.projectId.__root__: - return f"{connection.scheme.value}://{connection.credentials.gcsConfig.projectId or ''}" + if not connection.credentials.gcpConfig.projectId.__root__: + return f"{connection.scheme.value}://{connection.credentials.gcpConfig.projectId or ''}" if ( - not connection.credentials.gcsConfig.privateKey - and connection.credentials.gcsConfig.projectId.__root__ + not connection.credentials.gcpConfig.privateKey + and connection.credentials.gcpConfig.projectId.__root__ ): - project_id = connection.credentials.gcsConfig.projectId.__root__ + project_id = connection.credentials.gcpConfig.projectId.__root__ os.environ["GOOGLE_CLOUD_PROJECT"] = project_id - return f"{connection.scheme.value}://{connection.credentials.gcsConfig.projectId.__root__}" - elif isinstance(connection.credentials.gcsConfig.projectId, MultipleProjectId): - for project_id in connection.credentials.gcsConfig.projectId.__root__: - if not connection.credentials.gcsConfig.privateKey and project_id: + return f"{connection.scheme.value}://{connection.credentials.gcpConfig.projectId.__root__}" + elif isinstance(connection.credentials.gcpConfig.projectId, MultipleProjectId): + for project_id in connection.credentials.gcpConfig.projectId.__root__: + if not connection.credentials.gcpConfig.privateKey and project_id: # Setting environment variable based on project id given by user / set in ADC os.environ["GOOGLE_CLOUD_PROJECT"] = project_id return f"{connection.scheme.value}://{project_id}" @@ -78,9 +78,9 @@ def get_connection_url(connection: BigQueryConnection) -> str: def get_connection(connection: BigQueryConnection) -> Engine: """ - Prepare the engine and the GCS credentials + Prepare the engine and the GCP credentials """ - set_google_credentials(gcs_credentials=connection.credentials) + set_google_credentials(gcp_credentials=connection.credentials) return create_generic_db_connection( connection=connection, get_connection_url_fn=get_connection_url, diff --git a/ingestion/src/metadata/ingestion/source/database/bigquery/metadata.py b/ingestion/src/metadata/ingestion/source/database/bigquery/metadata.py index 11e47e8c263..84d76026d70 100644 --- a/ingestion/src/metadata/ingestion/source/database/bigquery/metadata.py +++ b/ingestion/src/metadata/ingestion/source/database/bigquery/metadata.py @@ -48,11 +48,11 @@ from metadata.generated.schema.entity.services.connections.metadata.openMetadata from metadata.generated.schema.metadataIngestion.workflow import ( Source as WorkflowSource, ) -from metadata.generated.schema.security.credentials.gcsCredentials import ( - GCSCredentialsPath, +from metadata.generated.schema.security.credentials.gcpCredentials import ( + GcpCredentialsPath, ) -from metadata.generated.schema.security.credentials.gcsValues import ( - GcsCredentialsValues, +from metadata.generated.schema.security.credentials.gcpValues import ( + GcpCredentialsValues, MultipleProjectId, SingleProjectId, ) @@ -316,9 +316,9 @@ class BigquerySource(CommonDbSourceService): def 
set_inspector(self, database_name: str): self.client = Client(project=database_name) if isinstance( - self.service_connection.credentials.gcsConfig, GcsCredentialsValues + self.service_connection.credentials.gcpConfig, GcpCredentialsValues ): - self.service_connection.credentials.gcsConfig.projectId = SingleProjectId( + self.service_connection.credentials.gcpConfig.projectId = SingleProjectId( __root__=database_name ) self.engine = get_connection(self.service_connection) @@ -326,19 +326,19 @@ class BigquerySource(CommonDbSourceService): def get_database_names(self) -> Iterable[str]: if isinstance( - self.service_connection.credentials.gcsConfig, GCSCredentialsPath + self.service_connection.credentials.gcpConfig, GcpCredentialsPath ): self.set_inspector(database_name=self.project_ids) yield self.project_ids elif isinstance( - self.service_connection.credentials.gcsConfig.projectId, SingleProjectId + self.service_connection.credentials.gcpConfig.projectId, SingleProjectId ): self.set_inspector(database_name=self.project_ids) yield self.project_ids elif hasattr( - self.service_connection.credentials.gcsConfig, "projectId" + self.service_connection.credentials.gcpConfig, "projectId" ) and isinstance( - self.service_connection.credentials.gcsConfig.projectId, MultipleProjectId + self.service_connection.credentials.gcpConfig.projectId, MultipleProjectId ): for project_id in self.project_ids: database_name = project_id diff --git a/ingestion/src/metadata/ingestion/source/database/datalake/connection.py b/ingestion/src/metadata/ingestion/source/database/datalake/connection.py index f7b753be4bc..6cbeb81f2ee 100644 --- a/ingestion/src/metadata/ingestion/source/database/datalake/connection.py +++ b/ingestion/src/metadata/ingestion/source/database/datalake/connection.py @@ -67,7 +67,7 @@ def _(config: S3Config): def _(config: GCSConfig): from google.cloud import storage - set_google_credentials(gcs_credentials=config.securityConfig) + set_google_credentials(gcp_credentials=config.securityConfig) gcs_client = storage.Client() return gcs_client diff --git a/ingestion/src/metadata/profiler/source/bigquery/profiler_source.py b/ingestion/src/metadata/profiler/source/bigquery/profiler_source.py index 300d550044b..641bbf036ed 100644 --- a/ingestion/src/metadata/profiler/source/bigquery/profiler_source.py +++ b/ingestion/src/metadata/profiler/source/bigquery/profiler_source.py @@ -22,8 +22,8 @@ from metadata.generated.schema.entity.services.databaseService import DatabaseSe from metadata.generated.schema.metadataIngestion.workflow import ( OpenMetadataWorkflowConfig, ) -from metadata.generated.schema.security.credentials.gcsValues import ( - GcsCredentialsValues, +from metadata.generated.schema.security.credentials.gcpValues import ( + GcpCredentialsValues, MultipleProjectId, SingleProjectId, ) @@ -50,11 +50,11 @@ class BigQueryProfilerSource(BaseProfilerSource): config.source.serviceConnection.__root__.config # type: ignore ) - if isinstance(config_copy.credentials.gcsConfig, GcsCredentialsValues): + if isinstance(config_copy.credentials.gcpConfig, GcpCredentialsValues): if isinstance( - config_copy.credentials.gcsConfig.projectId, MultipleProjectId + config_copy.credentials.gcpConfig.projectId, MultipleProjectId ): - config_copy.credentials.gcsConfig.projectId = SingleProjectId( + config_copy.credentials.gcpConfig.projectId = SingleProjectId( __root__=database.name.__root__ ) diff --git a/ingestion/src/metadata/utils/credentials.py b/ingestion/src/metadata/utils/credentials.py index 6a8f766e8ea..e29d17620fd 
100644 --- a/ingestion/src/metadata/utils/credentials.py +++ b/ingestion/src/metadata/utils/credentials.py @@ -19,12 +19,12 @@ from typing import Dict from cryptography.hazmat.primitives import serialization -from metadata.generated.schema.security.credentials.gcsCredentials import ( - GCSCredentials, - GCSCredentialsPath, +from metadata.generated.schema.security.credentials.gcpCredentials import ( + GCPCredentials, + GcpCredentialsPath, ) -from metadata.generated.schema.security.credentials.gcsValues import ( - GcsCredentialsValues, +from metadata.generated.schema.security.credentials.gcpValues import ( + GcpCredentialsValues, ) from metadata.utils.logger import utils_logger @@ -33,9 +33,9 @@ logger = utils_logger() GOOGLE_CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS" -class InvalidGcsConfigException(Exception): +class InvalidGcpConfigException(Exception): """ - Raised when we have errors trying to set GCS credentials + Raised when we have errors trying to set GCP credentials """ @@ -71,63 +71,63 @@ def create_credential_tmp_file(credentials: dict) -> str: return temp_file.name -def build_google_credentials_dict(gcs_values: GcsCredentialsValues) -> Dict[str, str]: +def build_google_credentials_dict(gcp_values: GcpCredentialsValues) -> Dict[str, str]: """ - Given GcsCredentialsValues, build a dictionary as the JSON file - downloaded from GCS with the service_account - :param gcs_values: GCS credentials + Given GcPCredentialsValues, build a dictionary as the JSON file + downloaded from GCP with the service_account + :param gcp_values: GCP credentials :return: Dictionary with credentials """ - private_key_str = gcs_values.privateKey.get_secret_value() + private_key_str = gcp_values.privateKey.get_secret_value() # adding the replace string here to escape line break if passed from env private_key_str = private_key_str.replace("\\n", "\n") validate_private_key(private_key_str) return { - "type": gcs_values.type, - "project_id": gcs_values.projectId.__root__, - "private_key_id": gcs_values.privateKeyId, + "type": gcp_values.type, + "project_id": gcp_values.projectId.__root__, + "private_key_id": gcp_values.privateKeyId, "private_key": private_key_str, - "client_email": gcs_values.clientEmail, - "client_id": gcs_values.clientId, - "auth_uri": str(gcs_values.authUri), - "token_uri": str(gcs_values.tokenUri), - "auth_provider_x509_cert_url": str(gcs_values.authProviderX509CertUrl), - "client_x509_cert_url": str(gcs_values.clientX509CertUrl), + "client_email": gcp_values.clientEmail, + "client_id": gcp_values.clientId, + "auth_uri": str(gcp_values.authUri), + "token_uri": str(gcp_values.tokenUri), + "auth_provider_x509_cert_url": str(gcp_values.authProviderX509CertUrl), + "client_x509_cert_url": str(gcp_values.clientX509CertUrl), } -def set_google_credentials(gcs_credentials: GCSCredentials) -> None: +def set_google_credentials(gcp_credentials: GCPCredentials) -> None: """ - Set GCS credentials environment variable - :param gcs_credentials: GCSCredentials + Set GCP credentials environment variable + :param gcp_credentials: GCPCredentials """ - if isinstance(gcs_credentials.gcsConfig, GCSCredentialsPath): - os.environ[GOOGLE_CREDENTIALS] = str(gcs_credentials.gcsConfig.__root__) + if isinstance(gcp_credentials.gcpConfig, GcpCredentialsPath): + os.environ[GOOGLE_CREDENTIALS] = str(gcp_credentials.gcpConfig.__root__) return - if gcs_credentials.gcsConfig.projectId is None: + if gcp_credentials.gcpConfig.projectId is None: logger.info( "No credentials available, using the current environment 
permissions authenticated via gcloud SDK." ) return - if isinstance(gcs_credentials.gcsConfig, GcsCredentialsValues): + if isinstance(gcp_credentials.gcpConfig, GcpCredentialsValues): if ( - gcs_credentials.gcsConfig.projectId - and not gcs_credentials.gcsConfig.privateKey + gcp_credentials.gcpConfig.projectId + and not gcp_credentials.gcpConfig.privateKey ): logger.info( "Overriding default projectid, using the current environment permissions authenticated via gcloud SDK." ) return - credentials_dict = build_google_credentials_dict(gcs_credentials.gcsConfig) + credentials_dict = build_google_credentials_dict(gcp_credentials.gcpConfig) tmp_credentials_file = create_credential_tmp_file(credentials=credentials_dict) os.environ[GOOGLE_CREDENTIALS] = tmp_credentials_file return - raise InvalidGcsConfigException( - f"Error trying to set GCS credentials with {gcs_credentials}." + raise InvalidGcpConfigException( + f"Error trying to set GCP credentials with {gcp_credentials}." " Check https://docs.open-metadata.org/connectors/database/bigquery " ) diff --git a/ingestion/src/metadata/utils/dbt_config.py b/ingestion/src/metadata/utils/dbt_config.py index e4d0b259d2c..f1c21edf5b9 100644 --- a/ingestion/src/metadata/utils/dbt_config.py +++ b/ingestion/src/metadata/utils/dbt_config.py @@ -281,7 +281,7 @@ def _(config: DbtGcsConfig): bucket_name, prefix = get_dbt_prefix_config(config) from google.cloud import storage # pylint: disable=import-outside-toplevel - set_google_credentials(gcs_credentials=config.dbtSecurityConfig) + set_google_credentials(gcp_credentials=config.dbtSecurityConfig) client = storage.Client() if not bucket_name: buckets = client.list_buckets() diff --git a/ingestion/tests/cli_e2e/database/bigquery/bigquery.yaml b/ingestion/tests/cli_e2e/database/bigquery/bigquery.yaml index 169a0956047..e6e0ab54b29 100644 --- a/ingestion/tests/cli_e2e/database/bigquery/bigquery.yaml +++ b/ingestion/tests/cli_e2e/database/bigquery/bigquery.yaml @@ -7,7 +7,7 @@ source: type: BigQuery taxonomyProjectID: [$E2E_BQ_PROJECT_ID_TAXONOMY] credentials: - gcsConfig: + gcpConfig: type: service_account projectId: $E2E_BQ_PROJECT_ID privateKeyId: $E2E_BQ_PRIVATE_KEY_ID diff --git a/ingestion/tests/integration/ometa/test_ometa_service_api.py b/ingestion/tests/integration/ometa/test_ometa_service_api.py index 14e4a5dd074..735d34e201b 100644 --- a/ingestion/tests/integration/ometa/test_ometa_service_api.py +++ b/ingestion/tests/integration/ometa/test_ometa_service_api.py @@ -157,7 +157,7 @@ class OMetaServiceTest(TestCase): "config": { "type": "BigQuery", "credentials": { - "gcsConfig": { + "gcpConfig": { "type": "service_account", "projectId": "projectID", "privateKeyId": "privateKeyId", diff --git a/ingestion/tests/unit/metadata/common/resources/bigquery.yaml b/ingestion/tests/unit/metadata/common/resources/bigquery.yaml index 52c61f87968..229c008968e 100644 --- a/ingestion/tests/unit/metadata/common/resources/bigquery.yaml +++ b/ingestion/tests/unit/metadata/common/resources/bigquery.yaml @@ -6,7 +6,7 @@ source: config: type: BigQuery credentials: - gcsConfig: "${KEY_PATH}" + gcpConfig: "${KEY_PATH}" sourceConfig: config: type: DatabaseMetadata diff --git a/ingestion/tests/unit/metadata/common/test_ingest_file_load.py b/ingestion/tests/unit/metadata/common/test_ingest_file_load.py index dd9b216ab57..4b57e3f9021 100644 --- a/ingestion/tests/unit/metadata/common/test_ingest_file_load.py +++ b/ingestion/tests/unit/metadata/common/test_ingest_file_load.py @@ -47,7 +47,7 @@ class TestIngestionFileLoad(TestCase): ) 
self.assertEqual( config_dict["source"]["serviceConnection"]["config"]["credentials"][ - "gcsConfig" + "gcpConfig" ], "/random/path", ) diff --git a/ingestion/tests/unit/profiler/test_profiler_partitions.py b/ingestion/tests/unit/profiler/test_profiler_partitions.py index 3ea1e5c550a..ee595cea537 100644 --- a/ingestion/tests/unit/profiler/test_profiler_partitions.py +++ b/ingestion/tests/unit/profiler/test_profiler_partitions.py @@ -42,7 +42,7 @@ mock_bigquery_config = { "type": "bigquery", "serviceName": "local_bigquery", "serviceConnection": { - "config": {"type": "BigQuery", "credentials": {"gcsConfig": {}}} + "config": {"type": "BigQuery", "credentials": {"gcpConfig": {}}} }, "sourceConfig": { "config": { diff --git a/ingestion/tests/unit/source/test_source_parsing.py b/ingestion/tests/unit/source/test_source_parsing.py index 91027a5ae62..e7ee67f4073 100644 --- a/ingestion/tests/unit/source/test_source_parsing.py +++ b/ingestion/tests/unit/source/test_source_parsing.py @@ -176,7 +176,7 @@ def test_bigquery(): "config": { "type": "BigQuery", "credentials": { - "gcsConfig": { + "gcpConfig": { "type": "service_account", "projectId": "projectID", "privateKeyId": "privateKeyId", @@ -349,7 +349,7 @@ def test_dynamo_db(): assert isinstance(config.serviceConnection.__root__.config, DynamoDBConnection) -def test_gcs(): +def test_gcp(): """TODO""" diff --git a/ingestion/tests/unit/test_credentials.py b/ingestion/tests/unit/test_credentials.py index 626adc1c192..6721da1aa99 100644 --- a/ingestion/tests/unit/test_credentials.py +++ b/ingestion/tests/unit/test_credentials.py @@ -15,8 +15,8 @@ from unittest import TestCase from pydantic import SecretStr -from metadata.generated.schema.security.credentials.gcsValues import ( - GcsCredentialsValues, +from metadata.generated.schema.security.credentials.gcpValues import ( + GcpCredentialsValues, ) from metadata.utils.credentials import ( InvalidPrivateKeyException, @@ -51,7 +51,7 @@ BYaz18xB1znonY33RIkCQQDE3wAWxFrvr582J12qJkE4enmNhRJFdcSREDX54d/5 VEhPQF0i0tUU7Fl071hcYaiQoZx4nIjN+NG6p5QKbl6k -----END RSA PRIVATE KEY-----""" - gcs_values = GcsCredentialsValues( + gcp_values = GcpCredentialsValues( type="my_type", projectId=["project_id"], privateKeyId="private_key_id", @@ -74,11 +74,11 @@ VEhPQF0i0tUU7Fl071hcYaiQoZx4nIjN+NG6p5QKbl6k "client_x509_cert_url": "http://localhost:1234", } - build_google_credentials_dict(gcs_values) + build_google_credentials_dict(gcp_values) - self.assertEqual(expected_dict, build_google_credentials_dict(gcs_values)) + self.assertEqual(expected_dict, build_google_credentials_dict(gcp_values)) - gcs_values.privateKey = SecretStr("I don't think I am a proper Private Key") + gcp_values.privateKey = SecretStr("I don't think I am a proper Private Key") with self.assertRaises(InvalidPrivateKeyException): - build_google_credentials_dict(gcs_values) + build_google_credentials_dict(gcp_values) diff --git a/ingestion/tests/unit/test_handle_partitions.py b/ingestion/tests/unit/test_handle_partitions.py index 2466c5c08f4..c0ff3afa363 100644 --- a/ingestion/tests/unit/test_handle_partitions.py +++ b/ingestion/tests/unit/test_handle_partitions.py @@ -36,7 +36,7 @@ mock_bigquery_config = { "type": "bigquery", "serviceName": "local_bigquery7", "serviceConnection": { - "config": {"type": "BigQuery", "credentials": {"gcsConfig": {}}} + "config": {"type": "BigQuery", "credentials": {"gcpConfig": {}}} }, "sourceConfig": { "config": { diff --git a/openmetadata-docs/content/v1.1.0-snapshot/connectors/credentials/index.md 
b/openmetadata-docs/content/v1.1.0-snapshot/connectors/credentials/index.md index c7c387429b1..b8a1b49b041 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/connectors/credentials/index.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/connectors/credentials/index.md @@ -137,9 +137,9 @@ In that case, you could use the following command after setting up the ingestion aws-vault exec -- $SHELL -c 'metadata ingest -c ' ``` -### GCS Credentials +### GCP Credentials -The GCS Credentials are based on the following [JSON Schema](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-spec/src/main/resources/json/schema/security/credentials/gcsCredentials.json). +The GCP Credentials are based on the following [JSON Schema](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-spec/src/main/resources/json/schema/security/credentials/gcpCredentials.json). These are the fields that you can export when preparing a Service Account. Once the account is created, you can see the fields in the exported JSON file from: diff --git a/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/bigquery/airflow.md b/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/bigquery/airflow.md index ab2376400bb..fada955f054 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/bigquery/airflow.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/bigquery/airflow.md @@ -120,13 +120,13 @@ This is a sample config for BigQuery: **hostPort**: BigQuery APIs URL. By default the API URL is `bigquery.googleapis.com` you can modify this if you have custom implementation of BigQuery. **credentials**: -You can authenticate with your bigquery instance using either `GCS Credentials Path` where you can specify the file path of the service account key or you can pass the values directly by choosing the `GCS Credentials Values` from the service account key file. +You can authenticate with your bigquery instance using either `GCP Credentials Path` where you can specify the file path of the service account key or you can pass the values directly by choosing the `GCP Credentials Values` from the service account key file. You can checkout [this](https://cloud.google.com/iam/docs/keys-create-delete#iam-service-account-keys-create-console) documentation on how to create the service account keys and download it. -**gcsConfig:** +**gcpConfig:** **1.** Passing the raw credential values provided by BigQuery. This requires us to provide the following information, all provided by BigQuery: @@ -142,14 +142,14 @@ You can checkout [this](https://cloud.google.com/iam/docs/keys-create-delete#iam - **clientX509CertUrl**: This is the URL of the certificate that verifies the authenticity of the service account. To fetch this key, look for the value associated with the `client_x509_cert_url` key in the service account key file. **2.** Passing a local file path that contains the credentials: - - **gcsCredentialsPath** + - **gcpCredentialsPath** - If you prefer to pass the credentials file, you can do so as follows: ```yaml credentials: - gcsConfig: + gcpConfig: ``` **Classification Name (Optional)**: If the Tag import is enabled, the name of the Classification will be created at OpenMetadata. @@ -170,14 +170,14 @@ In this field you need to specify the location/region in which the taxonomy was Location used to query `INFORMATION_SCHEMA.JOBS_BY_PROJECT` to fetch usage data. You can pass multi-regions, such as `us` or `eu`, or your specific region such as `us-east1`. 
Australia and Asia multi-regions are not yet supported. - If you want to use [ADC authentication](https://cloud.google.com/docs/authentication#adc) for BigQuery you can just leave -the GCS credentials empty. This is why they are not marked as required. +the GCP credentials empty. This is why they are not marked as required. ```yaml ... config: type: BigQuery credentials: - gcsConfig: {} + gcpConfig: {} ... ``` @@ -248,7 +248,7 @@ source: ``` ```yaml {% srNumber=1 %} credentials: - gcsConfig: + gcpConfig: type: My Type projectId: project ID # ["project-id-1", "project-id-2"] privateKeyId: us-east-2 diff --git a/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/bigquery/cli.md b/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/bigquery/cli.md index 0535e3859c4..d1c5b089bea 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/bigquery/cli.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/bigquery/cli.md @@ -107,13 +107,13 @@ This is a sample config for BigQuery: **hostPort**: BigQuery APIs URL. By default the API URL is `bigquery.googleapis.com` you can modify this if you have custom implementation of BigQuery. **credentials**: -You can authenticate with your bigquery instance using either `GCS Credentials Path` where you can specify the file path of the service account key or you can pass the values directly by choosing the `GCS Credentials Values` from the service account key file. +You can authenticate with your bigquery instance using either `GCP Credentials Path` where you can specify the file path of the service account key or you can pass the values directly by choosing the `GCP Credentials Values` from the service account key file. You can checkout [this](https://cloud.google.com/iam/docs/keys-create-delete#iam-service-account-keys-create-console) documentation on how to create the service account keys and download it. -**gcsConfig:** +**gcpConfig:** **1.** Passing the raw credential values provided by BigQuery. This requires us to provide the following information, all provided by BigQuery: @@ -129,7 +129,7 @@ You can checkout [this](https://cloud.google.com/iam/docs/keys-create-delete#iam - **clientX509CertUrl**: This is the URL of the certificate that verifies the authenticity of the service account. To fetch this key, look for the value associated with the `client_x509_cert_url` key in the service account key file. **2.** Passing a local file path that contains the credentials: - - **gcsCredentialsPath** + - **gcpCredentialsPath** **Taxonomy Project ID (Optional)**: Bigquery uses taxonomies to create hierarchical groups of policy tags. To apply access controls to BigQuery columns, tag the columns with policy tags. Learn more about how yo can create policy tags and set up column-level access control [here](https://cloud.google.com/bigquery/docs/column-level-security) @@ -151,18 +151,18 @@ Location used to query `INFORMATION_SCHEMA.JOBS_BY_PROJECT` to fetch usage data. - If you prefer to pass the credentials file, you can do so as follows: ```yaml credentials: - gcsConfig: + gcpConfig: ``` - If you want to use [ADC authentication](https://cloud.google.com/docs/authentication#adc) for BigQuery you can just leave -the GCS credentials empty. This is why they are not marked as required. +the GCP credentials empty. This is why they are not marked as required. ```yaml ... config: type: BigQuery credentials: - gcsConfig: {} + gcpConfig: {} ... 
``` @@ -233,7 +233,7 @@ source: ``` ```yaml {% srNumber=1 %} credentials: - gcsConfig: + gcpConfig: type: My Type projectId: project ID # ["project-id-1", "project-id-2"] privateKeyId: us-east-2 diff --git a/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/bigquery/index.md b/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/bigquery/index.md index 5583908277e..76aa41c4f70 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/bigquery/index.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/bigquery/index.md @@ -224,12 +224,12 @@ desired. **Host and Port**: BigQuery APIs URL. By default the API URL is `bigquery.googleapis.com` you can modify this if you have custom implementation of BigQuery. -**GCS Credentials**: -You can authenticate with your bigquery instance using either `GCS Credentials Path` where you can specify the file path of the service account key or you can pass the values directly by choosing the `GCS Credentials Values` from the service account key file. +**GCP Credentials**: +You can authenticate with your bigquery instance using either `GCP Credentials Path` where you can specify the file path of the service account key or you can pass the values directly by choosing the `GCP Credentials Values` from the service account key file. You can checkout [this](https://cloud.google.com/iam/docs/keys-create-delete#iam-service-account-keys-create-console) documentation on how to create the service account keys and download it. -**GCS Credentials Values**: Passing the raw credential values provided by BigQuery. This requires us to provide the following information, all provided by BigQuery: +**GCP Credentials Values**: Passing the raw credential values provided by BigQuery. This requires us to provide the following information, all provided by BigQuery: - **Credentials type**: Credentials Type is the type of the account, for a service account the value of this field is `service_account`. To fetch this key, look for the value associated with the `type` key in the service account key file. - **Project ID**: A project ID is a unique string used to differentiate your project from all others in Google Cloud. To fetch this key, look for the value associated with the `project_id` key in the service account key file. You can also pass multiple project id to ingest metadata from different BigQuery projects into one service. @@ -242,7 +242,7 @@ You can checkout [this](https://cloud.google.com/iam/docs/keys-create-delete#iam - **Authentication Provider X509 Certificate URL**: This is the URL of the certificate that verifies the authenticity of the authorization server. To fetch this key, look for the value associated with the `auth_provider_x509_cert_url` key in the service account key file. The Default value for Auth Provider X509Cert URL is https://www.googleapis.com/oauth2/v1/certs - **Client X509Cert URL**: This is the URL of the certificate that verifies the authenticity of the service account. To fetch this key, look for the value associated with the `client_x509_cert_url` key in the service account key file. -**GCS Credentials Path**: Passing a local file path that contains the credentials. +**GCP Credentials Path**: Passing a local file path that contains the credentials. **Taxonomy Project ID (Optional)**: Bigquery uses taxonomies to create hierarchical groups of policy tags. To apply access controls to BigQuery columns, tag the columns with policy tags. 
Learn more about how yo can create policy tags and set up column-level access control [here](https://cloud.google.com/bigquery/docs/column-level-security) @@ -265,7 +265,7 @@ Location used to query `INFORMATION_SCHEMA.JOBS_BY_PROJECT` to fetch usage data. If you want to use [ADC authentication](https://cloud.google.com/docs/authentication#adc) for BigQuery you can just leave -the GCS credentials empty. This is why they are not marked as required. +the GCP credentials empty. This is why they are not marked as required. {% /extraContent %} diff --git a/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/datalake/airflow.md b/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/datalake/airflow.md index 1d2a8ca2f04..142796462fc 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/datalake/airflow.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/datalake/airflow.md @@ -89,7 +89,7 @@ pip3 install "openmetadata-ingestion[datalake-s3]" #### GCS installation ```bash -pip3 install "openmetadata-ingestion[datalake-gcs]" +pip3 install "openmetadata-ingestion[datalake-gcp]" ``` #### Azure installation @@ -261,7 +261,7 @@ workflowConfig: * **authProviderX509CertUrl**: [https://www.googleapis.com/oauth2/v1/certs](https://www.googleapis.com/oauth2/v1/certs) by default * **clientX509CertUrl** * **bucketName**: name of the bucket in GCS -* **Prefix**: prefix in gcs bucket +* **Prefix**: prefix in gcp bucket {% /codeInfo %} @@ -315,7 +315,7 @@ source: securityConfig: ``` ```yaml {% srNumber=5 %} - gcsConfig: + gcpConfig: type: type of account projectId: project id privateKeyId: private key id diff --git a/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/datalake/cli.md b/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/datalake/cli.md index 7679301f24b..3f512bebaf7 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/datalake/cli.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/datalake/cli.md @@ -90,7 +90,7 @@ pip3 install "openmetadata-ingestion[datalake-s3]" #### GCS installation ```bash -pip3 install "openmetadata-ingestion[datalake-gcs]" +pip3 install "openmetadata-ingestion[datalake-gcp]" ``` #### Azure installation @@ -264,7 +264,7 @@ workflowConfig: * **authProviderX509CertUrl**: [https://www.googleapis.com/oauth2/v1/certs](https://www.googleapis.com/oauth2/v1/certs) by default * **clientX509CertUrl** * **bucketName**: name of the bucket in GCS -* **Prefix**: prefix in gcs bucket +* **Prefix**: prefix in gcp bucket {% /codeInfo %} @@ -318,7 +318,7 @@ source: securityConfig: ``` ```yaml {% srNumber=5 %} - gcsConfig: + gcpConfig: type: type of account projectId: project id privateKeyId: private key id diff --git a/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/datalake/index.md b/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/datalake/index.md index 04e3869075c..9d7c9ceaa6e 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/datalake/index.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/connectors/database/datalake/index.md @@ -326,8 +326,8 @@ We support two ways of authenticating to GCS: {% stepVisualInfo %} {% image - src="/images/v1.0.0/connectors/datalake/service-connection-using-gcs.png" - alt="service-connection-using-gcs"/%} + src="/images/v1.0.0/connectors/datalake/service-connection-using-gcp.png" + alt="service-connection-using-gcp"/%} {% /stepVisualInfo %} diff --git 
a/openmetadata-docs/content/v1.1.0-snapshot/connectors/ingestion/workflows/dbt/ingest-dbt-cli.md b/openmetadata-docs/content/v1.1.0-snapshot/connectors/ingestion/workflows/dbt/ingest-dbt-cli.md index 4a03eda1eff..05aa8d96f2a 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/connectors/ingestion/workflows/dbt/ingest-dbt-cli.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/connectors/ingestion/workflows/dbt/ingest-dbt-cli.md @@ -23,7 +23,7 @@ We can create a workflow that will obtain the dbt information from the dbt files ### 1. Create the workflow configuration -Configure the dbt.yaml file according keeping only one of the required source (local, http, gcs, s3). +Configure the dbt.yaml file according keeping only one of the required source (local, http, gcp, s3). The dbt files should be present on the source mentioned and should have the necssary permissions to be able to access the files. Enter the name of your database service from OpenMetadata in the `serviceName` key in the yaml @@ -59,7 +59,7 @@ source: # dbtObjectPrefix: "dbt/" # # For GCS Values # dbtSecurityConfig: # These are modeled after all GCS credentials - # gcsConfig: + # gcpConfig: # type: My Type # projectId: project ID # privateKeyId: us-east-2 @@ -75,7 +75,7 @@ source: # clientX509CertUrl: https://cert.url (URI) # # For GCS Values # dbtSecurityConfig: # These are modeled after all GCS credentials - # gcsConfig: path-to-credentials-file.json + # gcpConfig: path-to-credentials-file.json # dbtPrefixConfig: # dbtBucketName: bucket # dbtObjectPrefix: "dbt/" diff --git a/openmetadata-docs/content/v1.1.0-snapshot/connectors/ingestion/workflows/dbt/ingest-dbt-ui.md b/openmetadata-docs/content/v1.1.0-snapshot/connectors/ingestion/workflows/dbt/ingest-dbt-ui.md index 64bf40efeb5..a127dd553af 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/connectors/ingestion/workflows/dbt/ingest-dbt-ui.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/connectors/ingestion/workflows/dbt/ingest-dbt-ui.md @@ -61,7 +61,7 @@ The name of the s3 bucket and prefix path to the folder in which the dbt files a #### Google Cloud Storage Buckets -OpenMetadata connects to the GCS bucket via the credentials provided and scans the gcs buckets for `manifest.json`, `catalog.json` and `run_results.json` files. +OpenMetadata connects to the GCS bucket via the credentials provided and scans the gcp buckets for `manifest.json`, `catalog.json` and `run_results.json` files. The name of the GCS bucket and prefix path to the folder in which the dbt files are stored can be provided. In the case where these parameters are not provided all the buckets are scanned for the files. @@ -69,8 +69,8 @@ GCS credentials can be stored in two ways: **1.** Entering the credentials directly into the form {% image - src="/images/v1.0.0/features/ingestion/workflows/dbt/gcs-bucket-form.png" - alt="gcs-storage-bucket-form" + src="/images/v1.0.0/features/ingestion/workflows/dbt/gcp-bucket-form.png" + alt="gcp-storage-bucket-form" caption="GCS Bucket config" /%} @@ -78,8 +78,8 @@ GCS credentials can be stored in two ways: **2.** Entering the path of file in which the GCS bucket credentials are stored. 
{% image - src="/images/v1.0.0/features/ingestion/workflows/dbt/gcs-bucket-path.png" - alt="gcs-storage-bucket-path" + src="/images/v1.0.0/features/ingestion/workflows/dbt/gcp-bucket-path.png" + alt="gcp-storage-bucket-path" caption="GCS Bucket Path Config" /%} diff --git a/openmetadata-docs/content/v1.1.0-snapshot/connectors/pipeline/airflow/gcs.md b/openmetadata-docs/content/v1.1.0-snapshot/connectors/pipeline/airflow/gcs.md index 1dd453c4ad8..4ac419a7546 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/connectors/pipeline/airflow/gcs.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/connectors/pipeline/airflow/gcs.md @@ -1,6 +1,6 @@ --- title: Extract GCS Composer Metadata -slug: /connectors/pipeline/airflow/gcs +slug: /connectors/pipeline/airflow/gcp --- # Extract GCS Composer Metadata @@ -81,7 +81,7 @@ default_args = { config = """ source: type: airflow - serviceName: airflow_gcs_composer + serviceName: airflow_gcp_composer serviceConnection: config: type: Airflow @@ -101,7 +101,7 @@ workflowConfig: hostPort: https://sandbox.getcollate.io/api authProvider: google securityConfig: - secretKey: /home/airflow/gcs/data/gcs_creds_beta.json + secretKey: /home/airflow/gcp/data/gcp_creds_beta.json """ @@ -183,7 +183,7 @@ from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import Kubernete config = """ source: type: airflow - serviceName: airflow_gcs_composer_k8s_op + serviceName: airflow_gcp_composer_k8s_op serviceConnection: config: type: Airflow @@ -274,10 +274,10 @@ workflowConfig: Against Google SSO we need to use the [Cloud Storage](https://cloud.google.com/composer/docs/concepts/cloud-storage) to pass the `secretKey` JSON file. Upload the file to the `gs://bucket-name/data` directory, which will be mapped -against `/home/airflow/gcs/data/` in Airflow. +against `/home/airflow/gcp/data/` in Airflow. -You can see in the example above how our file is named `gcs_creds_beta.json`, which gets resolved in Airflow as -`/home/airflow/gcs/data/gcs_creds_beta.json`. +You can see in the example above how our file is named `gcp_creds_beta.json`, which gets resolved in Airflow as +`/home/airflow/gcp/data/gcp_creds_beta.json`. 
The workflow config here would look like: @@ -287,5 +287,5 @@ workflowConfig: hostPort: https://sandbox.getcollate.io/api authProvider: google securityConfig: - secretKey: /home/airflow/gcs/data/gcs_creds_beta.json + secretKey: /home/airflow/gcp/data/gcp_creds_beta.json ``` diff --git a/openmetadata-docs/content/v1.1.0-snapshot/connectors/pipeline/airflow/index.md b/openmetadata-docs/content/v1.1.0-snapshot/connectors/pipeline/airflow/index.md index 703681776ef..34b143d8e3f 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/connectors/pipeline/airflow/index.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/connectors/pipeline/airflow/index.md @@ -16,7 +16,7 @@ extract metadata directly from your Airflow instance or via the CLI: {% tile title="Ingest directly from your Airflow" description="Configure the ingestion with a DAG on your own Airflow instance" - link="/connectors/pipeline/airflow/gcs" + link="/connectors/pipeline/airflow/gcp" / %} {% tile title="Ingest with the CLI" diff --git a/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/entity/services/connections/database/bigQueryConnection.md b/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/entity/services/connections/database/bigQueryConnection.md index dd3a8dca25a..777312178d3 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/entity/services/connections/database/bigQueryConnection.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/entity/services/connections/database/bigQueryConnection.md @@ -12,7 +12,7 @@ slug: /main-concepts/metadata-standard/schemas/entity/services/connections/datab - **`type`**: Service Type. Refer to *#/definitions/bigqueryType*. Default: `BigQuery`. - **`scheme`**: SQLAlchemy driver scheme options. Refer to *#/definitions/bigqueryScheme*. Default: `bigquery`. - **`hostPort`** *(string)*: BigQuery APIs URL. Default: `bigquery.googleapis.com`. -- **`credentials`**: GCS Credentials. Refer to *../../../../security/credentials/gcsCredentials.json*. +- **`credentials`**: GCP Credentials. Refer to *../../../../security/credentials/gcpCredentials.json*. - **`tagCategoryName`** *(string)*: Custom OpenMetadata Tag category name for BigQuery policy tags. Default: `BigqueryPolicyTags`. - **`taxonomyLocation`** *(string)*: Taxonomy location used to fetch policy tags. Default: `us`. - **`usageLocation`** *(string)*: Location used to query INFORMATION_SCHEMA.JOBS_BY_PROJECT to fetch usage data. You can pass multi-regions, such as `us` or `eu`, or you specific region. Australia and Asia multi-regions are not yet in GA. Default: `us`. diff --git a/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/entity/services/connections/database/datalakeConnection.md b/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/entity/services/connections/database/datalakeConnection.md index 28f4da40c9c..120ac2f11c4 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/entity/services/connections/database/datalakeConnection.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/entity/services/connections/database/datalakeConnection.md @@ -20,7 +20,7 @@ slug: /main-concepts/metadata-standard/schemas/entity/services/connections/datab - **`datalakeType`** *(string)*: Service type. Must be one of: `['Datalake']`. Default: `Datalake`. 
- **`GCSConfig`**: DataLake Catalog and Manifest files in GCS storage. We will search for catalog.json and manifest.json. - - **`securityConfig`**: Refer to *../../../../security/credentials/gcsCredentials.json*. + - **`securityConfig`**: Refer to *../../../../security/credentials/gcpCredentials.json*. - **`S3Config`**: DataLake Catalog and Manifest files in S3 bucket. We will search for catalog.json and manifest.json. - **`securityConfig`**: Refer to *../../../../security/credentials/awsCredentials.json*. diff --git a/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/metadataIngestion/databaseServiceMetadataPipeline.md b/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/metadataIngestion/databaseServiceMetadataPipeline.md index c59ee9ec167..748299a8932 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/metadataIngestion/databaseServiceMetadataPipeline.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/metadataIngestion/databaseServiceMetadataPipeline.md @@ -37,7 +37,7 @@ slug: /main-concepts/metadata-standard/schemas/metadataingestion/databaseservice - **`dbtSecurityConfig`**: Refer to *../security/credentials/awsCredentials.json*. - **`dbtPrefixConfig`**: Refer to *#/definitions/dbtBucketDetails*. - **`dbtGCSConfig`**: DBT Catalog and Manifest files in GCS storage. We will search for catalog.json and manifest.json. - - **`dbtSecurityConfig`**: Refer to *../security/credentials/gcsCredentials.json*. + - **`dbtSecurityConfig`**: Refer to *../security/credentials/gcpCredentials.json*. - **`dbtPrefixConfig`**: Refer to *#/definitions/dbtBucketDetails*. diff --git a/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/security/credentials/gcsCredentials.md b/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/security/credentials/gcsCredentials.md index d6136ecda85..a596faa41ed 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/security/credentials/gcsCredentials.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/security/credentials/gcsCredentials.md @@ -1,6 +1,6 @@ --- -title: gcsCredentials -slug: /main-concepts/metadata-standard/schemas/security/credentials/gcscredentials +title: GCPCredentials +slug: /main-concepts/metadata-standard/schemas/security/credentials/gcpcredentials --- # GCSCredentials @@ -9,7 +9,7 @@ slug: /main-concepts/metadata-standard/schemas/security/credentials/gcscredentia ## Properties -- **`gcsConfig`**: GCS configs. +- **`gcpConfig`**: GCS configs. ## Definitions - **`GCSValues`** *(object)*: GCS Credentials. Cannot contain additional properties. diff --git a/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/type/storage.md b/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/type/storage.md index 2d215995fc5..78cc7a94637 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/type/storage.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/main-concepts/metadata-standard/schemas/type/storage.md @@ -12,7 +12,7 @@ slug: /main-concepts/metadata-standard/schemas/type/storage - **`storageServiceType`** *(string)*: Type of storage service such as S3, GCS, HDFS... Must be one of: `['S3', 'GCS', 'HDFS', 'ABFS']`. 
- **`storageClassType`** *(string)*: Type of storage class for the storage service. - **`s3StorageClass`** *(string)*: Type of storage class offered by S3. Must be one of: `['DEEP_ARCHIVE', 'GLACIER', 'INTELLIGENT_TIERING', 'ONEZONE_IA', 'OUTPOSTS', 'REDUCED_REDUNDANCY', 'STANDARD', 'STANDARD_IA']`. -- **`gcsStorageClass`** *(string)*: Type of storage class offered by GCS. Must be one of: `['ARCHIVE', 'COLDLINE', 'DURABLE_REDUCED_AVAILABILITY', 'MULTI_REGIONAL', 'NEARLINE', 'REGIONAL', 'STANDARD']`. +- **`gcpStorageClass`** *(string)*: Type of storage class offered by GCS. Must be one of: `['ARCHIVE', 'COLDLINE', 'DURABLE_REDUCED_AVAILABILITY', 'MULTI_REGIONAL', 'NEARLINE', 'REGIONAL', 'STANDARD']`. - **`abfsStorageClass`** *(string)*: Type of storage class offered by ABFS. Must be one of: `['ARCHIVE', 'HOT', 'COOL']`. diff --git a/openmetadata-docs/content/v1.1.0-snapshot/menu.md b/openmetadata-docs/content/v1.1.0-snapshot/menu.md index c79bbbcb736..cbce7fe86a4 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/menu.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/menu.md @@ -484,7 +484,7 @@ site_menu: - category: Connectors / Pipeline / Airflow / CLI url: /connectors/pipeline/airflow/cli - category: Connectors / Pipeline / Airflow / GCS Composer - url: /connectors/pipeline/airflow/gcs + url: /connectors/pipeline/airflow/gcp - category: Connectors / Pipeline / Airflow / Lineage Backend url: /connectors/pipeline/airflow/lineage-backend - category: Connectors / Pipeline / Airflow / Lineage Operator @@ -1053,7 +1053,7 @@ site_menu: - category: Main Concepts / Metadata Standard / Schemas / Security / Credentials / AWSCredentials url: /main-concepts/metadata-standard/schemas/security/credentials/awscredentials - category: Main Concepts / Metadata Standard / Schemas / Security / Credentials / GcsCredentials - url: /main-concepts/metadata-standard/schemas/security/credentials/gcscredentials + url: /main-concepts/metadata-standard/schemas/security/credentials/gcpcredentials - category: Main Concepts / Metadata Standard / Schemas / Security / Credentials url: /main-concepts/metadata-standard/schemas/security/credentials - category: Main Concepts / Metadata Standard / Schemas / Security diff --git a/openmetadata-docs/content/v1.1.0-snapshot/sdk/python/ingestion/dbt.md b/openmetadata-docs/content/v1.1.0-snapshot/sdk/python/ingestion/dbt.md index 2292c8f94d5..503786ca3bb 100644 --- a/openmetadata-docs/content/v1.1.0-snapshot/sdk/python/ingestion/dbt.md +++ b/openmetadata-docs/content/v1.1.0-snapshot/sdk/python/ingestion/dbt.md @@ -11,7 +11,7 @@ We will be going through a series of steps on how to configure dbt in OpenMetada ## Adding dbt configuration in JSON Config Below is an example showing the yaml config of the Redshift connector. The below example shows how to fetch the dbt files from AWS s3 bucket. -For more information on getting the dbt files from other sources like gcs, file server etc. please take a look [here](/sdk/python/ingestion/dbt#locate-the-dbt-files). +For more information on getting the dbt files from other sources like gcp, file server etc. please take a look [here](/sdk/python/ingestion/dbt#locate-the-dbt-files). ```yaml source: @@ -49,7 +49,7 @@ Add the details of the AWS s3 bucket in the above config: - `dbtPrefixConfig`: Bucket name and path of the dbt files in bucket ## Locate the dbt Files -The `get_dbt_details` method takes in the source config provided in the json and detects source type (gcs, s3, local or file server) based on the fields provided in the config. 
+The `get_dbt_details` method takes in the source config provided in the json and detects source type (gcp, s3, local or file server) based on the fields provided in the config. ```python from metadata.utils.dbt_config import get_dbt_details diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/BigQueryConnectionClassConverter.java b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/BigQueryConnectionClassConverter.java index a71cb76e7a8..86da5c3e0f2 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/BigQueryConnectionClassConverter.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/BigQueryConnectionClassConverter.java @@ -14,7 +14,7 @@ package org.openmetadata.service.secrets.converter; import java.util.List; -import org.openmetadata.schema.security.credentials.GCSCredentials; +import org.openmetadata.schema.security.credentials.GCPCredentials; import org.openmetadata.schema.services.connections.database.BigQueryConnection; import org.openmetadata.service.util.JsonUtils; @@ -29,8 +29,8 @@ public class BigQueryConnectionClassConverter extends ClassConverter { public Object convert(Object object) { BigQueryConnection bigQueryConnection = (BigQueryConnection) JsonUtils.convertValue(object, this.clazz); - tryToConvertOrFail(bigQueryConnection.getCredentials(), List.of(GCSCredentials.class)) - .ifPresent(obj -> bigQueryConnection.setCredentials((GCSCredentials) obj)); + tryToConvertOrFail(bigQueryConnection.getCredentials(), List.of(GCPCredentials.class)) + .ifPresent(obj -> bigQueryConnection.setCredentials((GCPCredentials) obj)); return bigQueryConnection; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/ClassConverterFactory.java b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/ClassConverterFactory.java index c7b41c9ff55..8e6155c9223 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/ClassConverterFactory.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/ClassConverterFactory.java @@ -20,7 +20,7 @@ import org.openmetadata.schema.entity.automations.TestServiceConnectionRequest; import org.openmetadata.schema.entity.automations.Workflow; import org.openmetadata.schema.metadataIngestion.DbtPipeline; import org.openmetadata.schema.metadataIngestion.dbtconfig.DbtGCSConfig; -import org.openmetadata.schema.security.credentials.GCSCredentials; +import org.openmetadata.schema.security.credentials.GCPCredentials; import org.openmetadata.schema.services.connections.dashboard.LookerConnection; import org.openmetadata.schema.services.connections.dashboard.SupersetConnection; import org.openmetadata.schema.services.connections.dashboard.TableauConnection; @@ -47,9 +47,9 @@ public final class ClassConverterFactory { Map.entry(DatalakeConnection.class, new DatalakeConnectionClassConverter()), Map.entry(DbtGCSConfig.class, new DbtGCSConfigClassConverter()), Map.entry(DbtPipeline.class, new DbtPipelineClassConverter()), - Map.entry(GCSConfig.class, new GCSConfigClassConverter()), - Map.entry(GCSCredentials.class, new GcsCredentialsClassConverter()), - Map.entry(GcsConnection.class, new GcsConnectionClassConverter()), + Map.entry(GCSConfig.class, new GCPConfigClassConverter()), + Map.entry(GCPCredentials.class, new GcpCredentialsClassConverter()), + Map.entry(GcsConnection.class, new GcpConnectionClassConverter()), 
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/BigQueryConnectionClassConverter.java b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/BigQueryConnectionClassConverter.java
index a71cb76e7a8..86da5c3e0f2 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/BigQueryConnectionClassConverter.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/BigQueryConnectionClassConverter.java
@@ -14,7 +14,7 @@ package org.openmetadata.service.secrets.converter;
 import java.util.List;
-import org.openmetadata.schema.security.credentials.GCSCredentials;
+import org.openmetadata.schema.security.credentials.GCPCredentials;
 import org.openmetadata.schema.services.connections.database.BigQueryConnection;
 import org.openmetadata.service.util.JsonUtils;
@@ -29,8 +29,8 @@ public class BigQueryConnectionClassConverter extends ClassConverter {
   public Object convert(Object object) {
     BigQueryConnection bigQueryConnection = (BigQueryConnection) JsonUtils.convertValue(object, this.clazz);
-    tryToConvertOrFail(bigQueryConnection.getCredentials(), List.of(GCSCredentials.class))
-        .ifPresent(obj -> bigQueryConnection.setCredentials((GCSCredentials) obj));
+    tryToConvertOrFail(bigQueryConnection.getCredentials(), List.of(GCPCredentials.class))
+        .ifPresent(obj -> bigQueryConnection.setCredentials((GCPCredentials) obj));
     return bigQueryConnection;
   }
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/ClassConverterFactory.java b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/ClassConverterFactory.java
index c7b41c9ff55..8e6155c9223 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/ClassConverterFactory.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/ClassConverterFactory.java
@@ -20,7 +20,7 @@ import org.openmetadata.schema.entity.automations.TestServiceConnectionRequest;
 import org.openmetadata.schema.entity.automations.Workflow;
 import org.openmetadata.schema.metadataIngestion.DbtPipeline;
 import org.openmetadata.schema.metadataIngestion.dbtconfig.DbtGCSConfig;
-import org.openmetadata.schema.security.credentials.GCSCredentials;
+import org.openmetadata.schema.security.credentials.GCPCredentials;
 import org.openmetadata.schema.services.connections.dashboard.LookerConnection;
 import org.openmetadata.schema.services.connections.dashboard.SupersetConnection;
 import org.openmetadata.schema.services.connections.dashboard.TableauConnection;
@@ -47,9 +47,9 @@ public final class ClassConverterFactory {
          Map.entry(DatalakeConnection.class, new DatalakeConnectionClassConverter()),
          Map.entry(DbtGCSConfig.class, new DbtGCSConfigClassConverter()),
          Map.entry(DbtPipeline.class, new DbtPipelineClassConverter()),
-          Map.entry(GCSConfig.class, new GCSConfigClassConverter()),
-          Map.entry(GCSCredentials.class, new GcsCredentialsClassConverter()),
-          Map.entry(GcsConnection.class, new GcsConnectionClassConverter()),
+          Map.entry(GCSConfig.class, new GCPConfigClassConverter()),
+          Map.entry(GCPCredentials.class, new GcpCredentialsClassConverter()),
+          Map.entry(GcsConnection.class, new GcpConnectionClassConverter()),
          Map.entry(LookerConnection.class, new LookerConnectionClassConverter()),
          Map.entry(OpenMetadataConnection.class, new OpenMetadataConnectionClassConverter()),
          Map.entry(SSOAuthMechanism.class, new SSOAuthMechanismClassConverter()),
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/DbtGCSConfigClassConverter.java b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/DbtGCSConfigClassConverter.java
index dc99a2f8d40..1b129feefad 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/DbtGCSConfigClassConverter.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/DbtGCSConfigClassConverter.java
@@ -15,7 +15,7 @@ package org.openmetadata.service.secrets.converter;
 import java.util.List;
 import org.openmetadata.schema.metadataIngestion.dbtconfig.DbtGCSConfig;
-import org.openmetadata.schema.security.credentials.GCSCredentials;
+import org.openmetadata.schema.security.credentials.GCPCredentials;
 import org.openmetadata.service.util.JsonUtils;
 /** Converter class to get an `DbtGCSConfig` object. */
@@ -29,8 +29,8 @@ public class DbtGCSConfigClassConverter extends ClassConverter {
   public Object convert(Object object) {
     DbtGCSConfig dbtGCSConfig = (DbtGCSConfig) JsonUtils.convertValue(object, this.clazz);
-    tryToConvertOrFail(dbtGCSConfig.getDbtSecurityConfig(), List.of(GCSCredentials.class))
-        .ifPresent(obj -> dbtGCSConfig.setDbtSecurityConfig((GCSCredentials) obj));
+    tryToConvertOrFail(dbtGCSConfig.getDbtSecurityConfig(), List.of(GCPCredentials.class))
+        .ifPresent(obj -> dbtGCSConfig.setDbtSecurityConfig((GCPCredentials) obj));
     return dbtGCSConfig;
   }
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GCSConfigClassConverter.java b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GCPConfigClassConverter.java
similarity index 81%
rename from openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GCSConfigClassConverter.java
rename to openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GCPConfigClassConverter.java
index 1f2a6e815b0..779c22b77b4 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GCSConfigClassConverter.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GCPConfigClassConverter.java
@@ -14,14 +14,14 @@ package org.openmetadata.service.secrets.converter;
 import java.util.List;
-import org.openmetadata.schema.security.credentials.GCSCredentials;
+import org.openmetadata.schema.security.credentials.GCPCredentials;
 import org.openmetadata.schema.services.connections.database.datalake.GCSConfig;
 import org.openmetadata.service.util.JsonUtils;
 /** Converter class to get an `GCSConfig` object.
*/ -public class GCSConfigClassConverter extends ClassConverter { +public class GCPConfigClassConverter extends ClassConverter { - public GCSConfigClassConverter() { + public GCPConfigClassConverter() { super(GCSConfig.class); } @@ -29,8 +29,8 @@ public class GCSConfigClassConverter extends ClassConverter { public Object convert(Object object) { GCSConfig gcsConfig = (GCSConfig) JsonUtils.convertValue(object, this.clazz); - tryToConvertOrFail(gcsConfig.getSecurityConfig(), List.of(GCSCredentials.class)) - .ifPresent(obj -> gcsConfig.setSecurityConfig((GCSCredentials) obj)); + tryToConvertOrFail(gcsConfig.getSecurityConfig(), List.of(GCPCredentials.class)) + .ifPresent(obj -> gcsConfig.setSecurityConfig((GCPCredentials) obj)); return gcsConfig; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GcsConnectionClassConverter.java b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GcpConnectionClassConverter.java similarity index 81% rename from openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GcsConnectionClassConverter.java rename to openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GcpConnectionClassConverter.java index 208a23f21c2..0237a3c83e1 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GcsConnectionClassConverter.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GcpConnectionClassConverter.java @@ -14,14 +14,14 @@ package org.openmetadata.service.secrets.converter; import java.util.List; -import org.openmetadata.schema.security.credentials.GCSCredentials; +import org.openmetadata.schema.security.credentials.GCPCredentials; import org.openmetadata.schema.services.connections.storage.GcsConnection; import org.openmetadata.service.util.JsonUtils; /** Converter class to get an `GcsConnection` object. 
*/ -public class GcsConnectionClassConverter extends ClassConverter { +public class GcpConnectionClassConverter extends ClassConverter { - public GcsConnectionClassConverter() { + public GcpConnectionClassConverter() { super(GcsConnection.class); } @@ -29,8 +29,8 @@ public class GcsConnectionClassConverter extends ClassConverter { public Object convert(Object object) { GcsConnection gcsConnection = (GcsConnection) JsonUtils.convertValue(object, this.clazz); - tryToConvertOrFail(gcsConnection.getCredentials(), List.of(GCSCredentials.class)) - .ifPresent(obj -> gcsConnection.setCredentials((GCSCredentials) obj)); + tryToConvertOrFail(gcsConnection.getCredentials(), List.of(GCPCredentials.class)) + .ifPresent(obj -> gcsConnection.setCredentials((GCPCredentials) obj)); return gcsConnection; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GcsCredentialsClassConverter.java b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GcpCredentialsClassConverter.java similarity index 61% rename from openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GcsCredentialsClassConverter.java rename to openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GcpCredentialsClassConverter.java index 2f355eee937..b581e025dd7 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GcsCredentialsClassConverter.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/secrets/converter/GcpCredentialsClassConverter.java @@ -14,25 +14,25 @@ package org.openmetadata.service.secrets.converter; import java.util.List; -import org.openmetadata.schema.security.credentials.GCSCredentials; -import org.openmetadata.schema.security.credentials.GCSValues; +import org.openmetadata.schema.security.credentials.GCPCredentials; +import org.openmetadata.schema.security.credentials.GCPValues; import org.openmetadata.service.util.JsonUtils; -/** Converter class to get an `GCSCredentials` object. */ -public class GcsCredentialsClassConverter extends ClassConverter { +/** Converter class to get an `GCPCredentials` object. 
*/ +public class GcpCredentialsClassConverter extends ClassConverter { - private static final List> CONNECTION_CLASSES = List.of(GCSValues.class); + private static final List> CONNECTION_CLASSES = List.of(GCPValues.class); - public GcsCredentialsClassConverter() { - super(GCSCredentials.class); + public GcpCredentialsClassConverter() { + super(GCPCredentials.class); } @Override public Object convert(Object object) { - GCSCredentials gcsCredentials = (GCSCredentials) JsonUtils.convertValue(object, this.clazz); + GCPCredentials gcpCredentials = (GCPCredentials) JsonUtils.convertValue(object, this.clazz); - tryToConvert(gcsCredentials.getGcsConfig(), CONNECTION_CLASSES).ifPresent(gcsCredentials::setGcsConfig); + tryToConvert(gcpCredentials.getGcpConfig(), CONNECTION_CLASSES).ifPresent(gcpCredentials::setGcpConfig); - return gcsCredentials; + return gcpCredentials; } } diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/secrets/converter/ClassConverterFactoryTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/secrets/converter/ClassConverterFactoryTest.java index 05e08067580..9d04a5b55cc 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/secrets/converter/ClassConverterFactoryTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/secrets/converter/ClassConverterFactoryTest.java @@ -11,7 +11,7 @@ import org.openmetadata.schema.entity.automations.TestServiceConnectionRequest; import org.openmetadata.schema.entity.automations.Workflow; import org.openmetadata.schema.metadataIngestion.DbtPipeline; import org.openmetadata.schema.metadataIngestion.dbtconfig.DbtGCSConfig; -import org.openmetadata.schema.security.credentials.GCSCredentials; +import org.openmetadata.schema.security.credentials.GCPCredentials; import org.openmetadata.schema.services.connections.dashboard.LookerConnection; import org.openmetadata.schema.services.connections.dashboard.SupersetConnection; import org.openmetadata.schema.services.connections.dashboard.TableauConnection; @@ -33,12 +33,12 @@ public class ClassConverterFactoryTest { DbtGCSConfig.class, DbtPipeline.class, GCSConfig.class, - GCSCredentials.class, GcsConnection.class, LookerConnection.class, OpenMetadataConnection.class, SSOAuthMechanism.class, SupersetConnection.class, + GCPCredentials.class, TableauConnection.class, TestServiceConnectionRequest.class, Workflow.class diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/secrets/masker/TestEntityMasker.java b/openmetadata-service/src/test/java/org/openmetadata/service/secrets/masker/TestEntityMasker.java index 3cf448a2937..304cbd6e3ce 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/secrets/masker/TestEntityMasker.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/secrets/masker/TestEntityMasker.java @@ -20,8 +20,8 @@ import org.openmetadata.schema.metadataIngestion.SourceConfig; import org.openmetadata.schema.metadataIngestion.dbtconfig.DbtGCSConfig; import org.openmetadata.schema.security.SecurityConfiguration; import org.openmetadata.schema.security.client.GoogleSSOClientConfig; -import org.openmetadata.schema.security.credentials.GCSCredentials; -import org.openmetadata.schema.security.credentials.GCSValues; +import org.openmetadata.schema.security.credentials.GCPCredentials; +import org.openmetadata.schema.security.credentials.GCPValues; import org.openmetadata.schema.services.connections.dashboard.SupersetConnection; import 
org.openmetadata.schema.services.connections.database.BigQueryConnection; import org.openmetadata.schema.services.connections.database.DatalakeConnection; @@ -61,7 +61,7 @@ abstract class TestEntityMasker { @Test void testBigQueryConnectionMasker() { - BigQueryConnection bigQueryConnection = new BigQueryConnection().withCredentials(buildGcsCredentials()); + BigQueryConnection bigQueryConnection = new BigQueryConnection().withCredentials(buildGcpCredentials()); BigQueryConnection masked = (BigQueryConnection) EntityMaskerFactory.createEntityMasker() @@ -218,8 +218,8 @@ abstract class TestEntityMasker { return PASSWORD; } - private GCSCredentials buildGcsCredentials() { - return new GCSCredentials().withGcsConfig(new GCSValues().withPrivateKey(PASSWORD)); + private GCPCredentials buildGcpCredentials() { + return new GCPCredentials().withGcpConfig(new GCPValues().withPrivateKey(PASSWORD)); } private MysqlConnection buildMysqlConnection() { @@ -227,11 +227,11 @@ abstract class TestEntityMasker { } private GCSConfig buildGcsConfig() { - return new GCSConfig().withSecurityConfig(buildGcsCredentials()); + return new GCSConfig().withSecurityConfig(buildGcpCredentials()); } - private String getPrivateKeyFromGcsConfig(GCSCredentials masked) { - return ((GCSValues) masked.getGcsConfig()).getPrivateKey(); + private String getPrivateKeyFromGcsConfig(GCPCredentials masked) { + return ((GCPValues) masked.getGcpConfig()).getPrivateKey(); } private IngestionPipeline buildIngestionPipeline() { @@ -241,7 +241,7 @@ abstract class TestEntityMasker { new SourceConfig() .withConfig( new DbtPipeline() - .withDbtConfigSource(new DbtGCSConfig().withDbtSecurityConfig(buildGcsCredentials())))) + .withDbtConfigSource(new DbtGCSConfig().withDbtSecurityConfig(buildGcpCredentials())))) .withOpenMetadataServerConnection(buildOpenMetadataConnection()); } diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/bigQueryConnection.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/bigQueryConnection.json index 24391aea1fb..5c8020e8aa4 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/bigQueryConnection.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/bigQueryConnection.json @@ -43,9 +43,9 @@ "default": "bigquery.googleapis.com" }, "credentials": { - "title": "GCS Credentials", - "description": "GCS Credentials", - "$ref": "../../../../security/credentials/gcsCredentials.json" + "title": "GCP Credentials", + "description": "GCP Credentials", + "$ref": "../../../../security/credentials/gcpCredentials.json" }, "taxonomyProjectID": { "title": "Taxonomy Project IDs", diff --git a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/datalake/gcsConfig.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/datalake/gcsConfig.json index d035b8a4598..fe10c176c6d 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/datalake/gcsConfig.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/datalake/gcsConfig.json @@ -8,7 +8,7 @@ "properties": { "securityConfig": { "title": "DataLake GCS Security Config", - "$ref": "../../../../../security/credentials/gcsCredentials.json" + "$ref": "../../../../../security/credentials/gcpCredentials.json" } }, "additionalProperties": false diff --git 
a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/storage/gcsConnection.json b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/storage/gcsConnection.json index bafc88b64f6..6b181e1b13c 100644 --- a/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/storage/gcsConnection.json +++ b/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/storage/gcsConnection.json @@ -21,9 +21,9 @@ "default": "Gcs" }, "credentials": { - "title": "GCS Credentials", - "description": "GCS Credentials", - "$ref": "../../../../security/credentials/gcsCredentials.json" + "title": "GCP Credentials", + "description": "GCP Credentials", + "$ref": "../../../../security/credentials/gcpCredentials.json" }, "connectionOptions": { "title": "Connection Options", diff --git a/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtGCSConfig.json b/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtGCSConfig.json index 97a0e5bafde..9e80103e591 100644 --- a/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtGCSConfig.json +++ b/openmetadata-spec/src/main/resources/json/schema/metadataIngestion/dbtconfig/dbtGCSConfig.json @@ -7,7 +7,7 @@ "properties": { "dbtSecurityConfig": { "title": "DBT GCS Security Config", - "$ref": "../../security/credentials/gcsCredentials.json" + "$ref": "../../security/credentials/gcpCredentials.json" }, "dbtPrefixConfig": { "title": "DBT Prefix Config", diff --git a/openmetadata-spec/src/main/resources/json/schema/security/credentials/gcpCredentials.json b/openmetadata-spec/src/main/resources/json/schema/security/credentials/gcpCredentials.json new file mode 100644 index 00000000000..7a10b77e9c8 --- /dev/null +++ b/openmetadata-spec/src/main/resources/json/schema/security/credentials/gcpCredentials.json @@ -0,0 +1,31 @@ +{ + "$id": "https://open-metadata.org/security/credentials/gcpCredentials.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "GCPCredentials", + "description": "GCP credentials configs.", + "type": "object", + "javaType": "org.openmetadata.schema.security.credentials.GCPCredentials", + "definitions": { + "gcpCredentialsPath": { + "title": "GCP Credentials Path", + "description": "Pass the path of file containing the GCP credentials info", + "type": "string" + } + }, + "properties": { + "gcpConfig": { + "title": "GCP Credentials Configuration", + "description": "We support two ways of authenticating to GCP i.e via GCP Credentials Values or GCP Credentials Path", + "oneOf": [ + { + "$ref": "gcpValues.json" + }, + { + "$ref": "#/definitions/gcpCredentialsPath" + } + ] + } + }, + "additionalProperties": false, + "required": ["gcpConfig"] +} \ No newline at end of file diff --git a/openmetadata-spec/src/main/resources/json/schema/security/credentials/gcsValues.json b/openmetadata-spec/src/main/resources/json/schema/security/credentials/gcpValues.json similarity index 88% rename from openmetadata-spec/src/main/resources/json/schema/security/credentials/gcsValues.json rename to openmetadata-spec/src/main/resources/json/schema/security/credentials/gcpValues.json index d802f06da00..945b75c3fb3 100644 --- a/openmetadata-spec/src/main/resources/json/schema/security/credentials/gcsValues.json +++ b/openmetadata-spec/src/main/resources/json/schema/security/credentials/gcpValues.json @@ -1,10 +1,10 @@ { - "$id": "https://open-metadata.org/security/credentials/gcsValues.json", + "$id": 
"https://open-metadata.org/security/credentials/gcpValues.json", "$schema": "http://json-schema.org/draft-07/schema#", "type": "object", - "javaType": "org.openmetadata.schema.security.credentials.GCSValues", - "title": "GCS Credentials Values", - "description": "Pass the raw credential values provided by GCS", + "javaType": "org.openmetadata.schema.security.credentials.GCPValues", + "title": "GCP Credentials Values", + "description": "Pass the raw credential values provided by GCP", "definitions": { "singleProjectId": { "title": "Single Project ID", @@ -21,7 +21,7 @@ "properties": { "type": { "title": "Credentials Type", - "description": "Google Cloud service account type.", + "description": "Google Cloud Platform account type.", "type": "string" }, "projectId": { diff --git a/openmetadata-spec/src/main/resources/json/schema/security/credentials/gcsCredentials.json b/openmetadata-spec/src/main/resources/json/schema/security/credentials/gcsCredentials.json deleted file mode 100644 index cc5d4986ae1..00000000000 --- a/openmetadata-spec/src/main/resources/json/schema/security/credentials/gcsCredentials.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "$id": "https://open-metadata.org/security/credentials/gcsCredentials.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "GCSCredentials", - "description": "GCS credentials configs.", - "type": "object", - "javaType": "org.openmetadata.schema.security.credentials.GCSCredentials", - "definitions": { - "GCSCredentialsPath": { - "title": "GCS Credentials Path", - "description": "Pass the path of file containing the GCS credentials info", - "type": "string" - } - }, - "properties": { - "gcsConfig": { - "title": "GCS Credentials Configuration", - "description": "We support two ways of authenticating to GCS i.e via GCS Credentials Values or GCS Credentials Path", - "oneOf": [ - { - "$ref": "gcsValues.json" - }, - { - "$ref": "#/definitions/GCSCredentialsPath" - } - ] - } - }, - "additionalProperties": false, - "required": ["gcsConfig"] -} \ No newline at end of file diff --git a/openmetadata-ui/src/main/resources/ui/cypress/e2e/AddNewService/bigquery.spec.js b/openmetadata-ui/src/main/resources/ui/cypress/e2e/AddNewService/bigquery.spec.js index ca71d5d465a..fa98f2f49e2 100644 --- a/openmetadata-ui/src/main/resources/ui/cypress/e2e/AddNewService/bigquery.spec.js +++ b/openmetadata-ui/src/main/resources/ui/cypress/e2e/AddNewService/bigquery.spec.js @@ -37,31 +37,31 @@ describe('BigQuery Ingestion', () => { goToAddNewServicePage(SERVICE_TYPE.Database); const connectionInput = () => { const clientEmail = Cypress.env('bigqueryClientEmail'); - cy.get('.form-group > #root\\/credentials\\/gcsConfig\\/type') + cy.get('.form-group > #root\\/credentials\\/gcpConfig\\/type') .scrollIntoView() .type('service_account'); checkServiceFieldSectionHighlighting('type'); - cy.get('#root\\/credentials\\/gcsConfig\\/projectId') + cy.get('#root\\/credentials\\/gcpConfig\\/projectId') .scrollIntoView() .type(Cypress.env('bigqueryProjectId')); checkServiceFieldSectionHighlighting('projectId'); - cy.get('#root\\/credentials\\/gcsConfig\\/privateKeyId') + cy.get('#root\\/credentials\\/gcpConfig\\/privateKeyId') .scrollIntoView() .type(Cypress.env('bigqueryPrivateKeyId')); checkServiceFieldSectionHighlighting('privateKeyId'); - cy.get('#root\\/credentials\\/gcsConfig\\/privateKey') + cy.get('#root\\/credentials\\/gcpConfig\\/privateKey') .scrollIntoView() .type(Cypress.env('bigqueryPrivateKey')); checkServiceFieldSectionHighlighting('privateKey'); - 
cy.get('#root\\/credentials\\/gcsConfig\\/clientEmail') + cy.get('#root\\/credentials\\/gcpConfig\\/clientEmail') .scrollIntoView() .type(clientEmail); checkServiceFieldSectionHighlighting('clientEmail'); - cy.get('#root\\/credentials\\/gcsConfig\\/clientId') + cy.get('#root\\/credentials\\/gcpConfig\\/clientId') .scrollIntoView() .type(Cypress.env('bigqueryClientId')); checkServiceFieldSectionHighlighting('clientId'); - cy.get('#root\\/credentials\\/gcsConfig\\/clientX509CertUrl') + cy.get('#root\\/credentials\\/gcpConfig\\/clientX509CertUrl') .scrollIntoView() .type( `https://www.googleapis.com/robot/v1/metadata/x509/${encodeURIComponent( diff --git a/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Database/BigQuery.md b/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Database/BigQuery.md index 2389de267b0..79e6ccca11a 100644 --- a/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Database/BigQuery.md +++ b/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Database/BigQuery.md @@ -56,13 +56,13 @@ BigQuery APIs URL. By default, the API URL is `bigquery.googleapis.com`. You can $$ $$section -### GCS Credentials Configuration $(id="gcsConfig") +### GCP Credentials Configuration $(id="gcpConfig") -You can authenticate with your BigQuery instance using either `GCS Credentials Path` where you can specify the file path of the service account key, or you can pass the values directly by choosing the `GCS Credentials Values` from the service account key file. +You can authenticate with your BigQuery instance using either `GCP Credentials Path` where you can specify the file path of the service account key, or you can pass the values directly by choosing the `GCP Credentials Values` from the service account key file. You can check [this](https://cloud.google.com/iam/docs/keys-create-delete#iam-service-account-keys-create-console) documentation on how to create the service account keys and download it. -If you want to use [ADC authentication](https://cloud.google.com/docs/authentication#adc) for BigQuery you can just leave the GCS credentials empty. +If you want to use [ADC authentication](https://cloud.google.com/docs/authentication#adc) for BigQuery you can just leave the GCP credentials empty. $$ diff --git a/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Database/Datalake.md b/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Database/Datalake.md index a015478cb78..0657705b055 100644 --- a/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Database/Datalake.md +++ b/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Database/Datalake.md @@ -71,9 +71,9 @@ To get the Client ID (also known as application ID), follow these steps: To find the GCS service account Client ID from a service account file, you can open the JSON file and look for the `client_id` field. Here are the steps: -1. Open the JSON file for the GCS service account in a text editor or IDE. +1. Open the JSON file for the GCP service account in a text editor or IDE. 2. Look for the `client_id` field, which should be listed under the `private_key` object. -3. The value of the `client_id` field is the GCS service account Client ID. +3. The value of the `client_id` field is the GCP service account Client ID. 
$$ @@ -118,7 +118,7 @@ $$ ## GCS $$section -### GCS Credentials Configuration $(id="gcsConfig") +### GCP Credentials Configuration $(id="gcsConfig") - **GCS credentials value**: Users can choose to pass their Google Cloud Storage (GCS) credentials as a JSON object. This approach involves directly including the credentials in the code or environment variable. - **GCS Credentials Path**: Users can choose to pass the path of their GCS credentials file. This approach involves storing the credentials in a file, and providing the path to the file in the code or environment variable. $$ diff --git a/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Database/workflows/dbt.md b/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Database/workflows/dbt.md index 9a324b45a0e..8db01db501a 100644 --- a/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Database/workflows/dbt.md +++ b/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Database/workflows/dbt.md @@ -12,7 +12,7 @@ You can choose one of the 5 sources to fetch the dbt files: 2. **dbt HTTP Config**: Config to fetch dbt files from an HTTP or File Server. 3. **dbt Cloud Config**: Config to fetch the dbt files from dbt cloud APIs 4. **dbt S3 Config**: Config to fetch the dbt files from s3. -5. **dbt GCS Config**: Config to fetch the dbt files from gcs. +5. **dbt GCS Config**: Config to fetch the dbt files from gcp. $$ $$section diff --git a/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Pipeline/Airflow.md b/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Pipeline/Airflow.md index 01fbfa60a5e..8db8af69285 100644 --- a/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Pipeline/Airflow.md +++ b/openmetadata-ui/src/main/resources/ui/public/locales/en-US/Pipeline/Airflow.md @@ -31,7 +31,7 @@ $$section Select your underlying database connection. We support the [official](https://airflow.apache.org/docs/apache-airflow/stable/howto/set-up-database.html) backends from Airflow. -Note that the **Backend Connection** is only used to extract metadata from a DAG running directly in your instance, for example to get the metadata out of [GCS Composer](https://docs.open-metadata.org/connectors/pipeline/airflow/gcs). +Note that the **Backend Connection** is only used to extract metadata from a DAG running directly in your instance, for example to get the metadata out of [GCS Composer](https://docs.open-metadata.org/connectors/pipeline/airflow/gcp). 
$$ diff --git a/openmetadata-ui/src/main/resources/ui/src/components/ServiceConnectionDetails/ServiceConnectionDetails.component.tsx b/openmetadata-ui/src/main/resources/ui/src/components/ServiceConnectionDetails/ServiceConnectionDetails.component.tsx index 804df4be1d7..28302c2e748 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/ServiceConnectionDetails/ServiceConnectionDetails.component.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/ServiceConnectionDetails/ServiceConnectionDetails.component.tsx @@ -76,9 +76,9 @@ const ServiceConnectionDetails = ({ serviceCategory.slice(0, -1) === EntityType.DATABASE_SERVICE && key === 'credentials' ) { - // Condition for GCS Credentials path + // Condition for GCP Credentials path const newSchemaPropertyObject = - schemaPropertyObject[key].definitions.GCSCredentialsPath; + schemaPropertyObject[key].definitions.gcpCredentialsPath; return getKeyValues(value, newSchemaPropertyObject); } else if ( @@ -86,7 +86,7 @@ const ServiceConnectionDetails = ({ key === 'configSource' ) { if (isObject(value.securityConfig)) { - if (!value.securityConfig.gcsConfig) { + if (!value.securityConfig.gcpConfig) { if (Object.keys(schemaPropertyObject[key]).includes(oneOf)) { if ( value.securityConfig?.awsAccessKeyId || @@ -113,24 +113,24 @@ const ServiceConnectionDetails = ({ return getKeyValues(value, newSchemaPropertyObject); } } else { - if (isObject(value.securityConfig.gcsConfig)) { - // Condition for GCS Credentials value + if (isObject(value.securityConfig.gcpConfig)) { + // Condition for GCP Credentials value return getKeyValues( - value.securityConfig.gcsConfig, + value.securityConfig.gcpConfig, get( schema, - 'definitions.GCSConfig.properties.securityConfig.definitions.GCSValues.properties', + 'definitions.GCPConfig.properties.securityConfig.definitions.GCPValues.properties', {} ) ); } else { - // Condition for GCS Credentials path + // Condition for GCP Credentials path return getKeyValues( value, get( schema, - 'definitions.GCSConfig.properties.securityConfig.definitions.GCSCredentialsPath', + 'definitions.GCPConfig.properties.securityConfig.definitions.gcpCredentialsPath', {} ) ); diff --git a/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTConfigForm.interface.ts b/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTConfigForm.interface.ts index 2b67a44d6a3..dca1688e49a 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTConfigForm.interface.ts +++ b/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTConfigForm.interface.ts @@ -13,9 +13,9 @@ import { FormSubmitType } from '../../../enums/form.enum'; import { + Credentials, DbtConfig, - GCSCredentialsValues, - SCredentials, + GCPCredentialsValues, } from '../../../generated/metadataIngestion/dbtPipeline'; import { AddIngestionState, @@ -80,7 +80,7 @@ export type DbtConfigS3GCS = Pick< >; export type DbtS3Creds = Pick< - SCredentials, + Credentials, | 'awsAccessKeyId' | 'awsRegion' | 'awsSecretAccessKey' @@ -100,7 +100,7 @@ export interface DbtSourceTypes { gcsType?: GCS_CONFIG; } -export type DbtGCSCreds = GCSCredentialsValues; +export type DbtGCSCreds = GCPCredentialsValues; export type ErrorDbtCloud = Record; diff --git a/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTConfigFormBuilder.tsx b/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTConfigFormBuilder.tsx 
index 021dfc600b0..059f090ae65 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTConfigFormBuilder.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTConfigFormBuilder.tsx @@ -273,7 +273,7 @@ const DBTConfigFormBuilder: FunctionComponent = ({ dbtConfigSourceType: currentDbtConfigSourceType, dbtConfigSource: { dbtSecurityConfig: { - gcsConfig: + gcpConfig: currentGcsConfigType === GCS_CONFIG.GCSValues ? { type: value?.type, @@ -287,7 +287,7 @@ const DBTConfigFormBuilder: FunctionComponent = ({ authProviderX509CertUrl: value?.authProviderX509CertUrl, clientX509CertUrl: value?.clientX509CertUrl, } - : value?.GCSCredentialsPath, + : value?.gcpCredentialsPath, }, dbtPrefixConfig: { dbtBucketName: value?.dbtBucketName, diff --git a/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTGCSConfig.test.tsx b/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTGCSConfig.test.tsx index 0b94c9ac0df..bf3b033e564 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTGCSConfig.test.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTGCSConfig.test.tsx @@ -13,8 +13,8 @@ import { fireEvent, getByTestId, render } from '@testing-library/react'; import { + Credentials, DBTBucketDetails, - SCredentials, } from 'generated/metadataIngestion/dbtPipeline'; import React from 'react'; import { GCS_CONFIG } from './DBTFormEnum'; @@ -25,7 +25,7 @@ const mockProps = { dbtUpdateDescriptions: false, enableDebugLog: false, dbtClassificationName: '', - dbtSecurityConfig: {} as SCredentials, + dbtSecurityConfig: {} as Credentials, dbtPrefixConfig: {} as DBTBucketDetails, }; diff --git a/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTGCSConfig.tsx b/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTGCSConfig.tsx index d7500f5e918..a800db6af67 100644 --- a/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTGCSConfig.tsx +++ b/openmetadata-ui/src/main/resources/ui/src/components/common/DBTConfigFormBuilder/DBTGCSConfig.tsx @@ -15,7 +15,7 @@ import { t } from 'i18next'; import { FieldProp, FieldTypes } from 'interface/FormUtils.interface'; import React, { Fragment, FunctionComponent } from 'react'; import { generateFormFields, getField } from 'utils/formUtils'; -import { GCSCredentialsValues } from '../../../generated/metadataIngestion/dbtPipeline'; +import { GCPCredentialsValues } from '../../../generated/metadataIngestion/dbtPipeline'; import DBTCommonFields from './DBTCommonFields.component'; import { DbtConfigS3GCS } from './DBTConfigForm.interface'; import { GCSCreds } from './DBTFormConstants'; @@ -64,7 +64,7 @@ export const DBTGCSConfig: FunctionComponent = ({ }, ]; - const gcsCredConfigs = (gcsConfig?: GCSCredentialsValues) => { + const gcsCredConfigs = (gcsConfig?: GCPCredentialsValues) => { const gcsCredConfigFields: FieldProp[] = [ { name: 'type', @@ -218,7 +218,7 @@ export const DBTGCSConfig: FunctionComponent = ({ }, id: 'root/GCSCredentialsPath', formItemProps: { - initialValue: dbtSecurityConfig?.gcsConfig || '', + initialValue: dbtSecurityConfig?.gcpConfig || '', }, }, ]; @@ -241,7 +241,7 @@ export const DBTGCSConfig: FunctionComponent = ({ })} {gcsType === GCS_CONFIG.GCSValues - ? gcsCredConfigs(dbtSecurityConfig?.gcsConfig as GCSCredentialsValues) + ? 
gcsCredConfigs(dbtSecurityConfig?.gcpConfig as GCPCredentialsValues) : generateFormFields(gcsCredPathFields)} {generateFormFields(dbtPrefixConfigFields)} diff --git a/openmetadata-ui/src/main/resources/ui/src/utils/DBTConfigFormUtil.ts b/openmetadata-ui/src/main/resources/ui/src/utils/DBTConfigFormUtil.ts index 42c54ed9a63..d0cb969835d 100644 --- a/openmetadata-ui/src/main/resources/ui/src/utils/DBTConfigFormUtil.ts +++ b/openmetadata-ui/src/main/resources/ui/src/utils/DBTConfigFormUtil.ts @@ -41,10 +41,11 @@ import i18next from 'i18next'; import { isEmpty, isNil, isString } from 'lodash'; import { FormValidationRulesType } from '../enums/form.enum'; import { + Credentials, DbtConfig, - GCSCredentialsValues, - SCredentials, + GCPCredentialsValues, } from '../generated/metadataIngestion/dbtPipeline'; + import { FormValidationRules } from '../interface/FormUtils.interface'; import { isValidEmail, isValidUrl } from './CommonUtils'; @@ -109,7 +110,7 @@ export const validateDbtHttpConfig = ( }; export const validateDbtS3Config = ( - data: SCredentials, + data: Credentials, requiredFields = reqDBTS3Fields ) => { let isValid = true; @@ -181,7 +182,7 @@ function getInvalidUrlErrors< } export const checkDbtS3CredsConfigRules = ( - data: SCredentials, + data: Credentials, ruleFields = rulesDBTS3CredsFields ) => { let isValid = true; @@ -198,7 +199,7 @@ export const checkDbtS3CredsConfigRules = ( }; export const checkDbtGCSCredsConfigRules = ( - data: GCSCredentialsValues, + data: GCPCredentialsValues, ruleFields = rulesDBTGCSCredsFields ) => { let isValid = true; @@ -237,9 +238,9 @@ export const getSourceTypeFromConfig = ( let gcsType = undefined; if (data) { if (!isNil(data.dbtSecurityConfig)) { - if (!isNil(data.dbtSecurityConfig.gcsConfig)) { + if (!isNil(data.dbtSecurityConfig.gcpConfig)) { sourceType = DBT_SOURCES.gcs; - gcsType = isString(data.dbtSecurityConfig.gcsConfig) + gcsType = isString(data.dbtSecurityConfig.gcpConfig) ? GCS_CONFIG.GCSCredentialsPath : GCS_CONFIG.GCSValues; } else {