Fix #5210: Review database field for connectors (#5225)

* Fix #5210: Review database field for connectors
Milan Bariya 2022-06-06 10:48:23 +05:30 committed by GitHub
parent 65330f24ef
commit ec1aca926a
27 changed files with 97 additions and 127 deletions

View File

@@ -38,11 +38,6 @@
      "type": "string",
      "default": "bigquery.googleapis.com"
    },
-   "projectId": {
-     "title": "Project ID",
-     "description": "BigQuery project ID. Only required if using credentials path instead of values.",
-     "type": "string"
-   },
    "credentials": {
      "title": "GCS Credentials",
      "description": "GCS Credentials",
@@ -54,11 +49,6 @@
      "type": "string",
      "default": "BigqueryPolicyTags"
    },
-   "database": {
-     "title": "Database",
-     "description": "Database of the data source. This is optional parameter, if you would like to restrict the metadata reading to a single database. When left blank, OpenMetadata Ingestion attempts to scan all the databases.",
-     "type": "string"
-   },
    "partitionQueryDuration": {
      "title": "Partition Query Duration",
      "description": "Duration for partitioning BigQuery tables.",

View File

@@ -48,11 +48,6 @@
      "description": "Host and port of the Databricks service.",
      "type": "string"
    },
-   "database": {
-     "title": "Database",
-     "description": "Database of the data source. This is optional parameter, if you would like to restrict the metadata reading to a single database. When left blank , OpenMetadata Ingestion attempts to scan all the databases in Databricks.",
-     "type": "string"
-   },
    "token": {
      "title": "Token",
      "description": "Generated Token to connect to Databricks.",

View File

@@ -48,9 +48,9 @@
      "description": "Host and port of the DB2 service.",
      "type": "string"
    },
-   "database": {
-     "title": "Database",
-     "description": "Database of the data source. This is optional parameter, if you would like to restrict the metadata reading to a single database. When left blank, OpenMetadata Ingestion attempts to scan all the databases.",
+   "databaseSchema": {
+     "title": "databaseSchema",
+     "description": "databaseSchema of the data source. This is an optional parameter; if you would like to restrict the metadata reading to a single databaseSchema, specify it here. When left blank, OpenMetadata Ingestion attempts to scan all the databaseSchemas.",
      "type": "string"
    },
    "connectionOptions": {

View File

@@ -48,9 +48,9 @@
      "description": "Host and port of the Hive service.",
      "type": "string"
    },
-   "database": {
-     "title": "Database",
-     "description": "Database of the data source. This is optional parameter, if you would like to restrict the metadata reading to a single database. When left blank, OpenMetadata Ingestion attempts to scan all the databases.",
+   "databaseSchema": {
+     "title": "databaseSchema",
+     "description": "databaseSchema of the data source. This is an optional parameter; if you would like to restrict the metadata reading to a single databaseSchema, specify it here. When left blank, OpenMetadata Ingestion attempts to scan all the databaseSchemas.",
      "type": "string"
    },
    "authOptions": {

View File

@@ -48,9 +48,9 @@
      "description": "Host and port of the MariaDB service.",
      "type": "string"
    },
-   "database": {
-     "title": "Database",
-     "description": "Database of the data source. This is optional parameter, if you would like to restrict the metadata reading to a single database. When left blank, OpenMetadata Ingestion attempts to scan all the databases.",
+   "databaseSchema": {
+     "title": "databaseSchema",
+     "description": "databaseSchema of the data source. This is an optional parameter; if you would like to restrict the metadata reading to a single databaseSchema, specify it here. When left blank, OpenMetadata Ingestion attempts to scan all the databaseSchemas.",
      "type": "string"
    },
    "connectionOptions": {

View File

@@ -48,9 +48,9 @@
      "description": "Host and port of the MySQL service.",
      "type": "string"
    },
-   "database": {
-     "title": "Database",
-     "description": "Database of the data source. This is optional parameter, if you would like to restrict the metadata reading to a single database. When left blank, OpenMetadata Ingestion attempts to scan all the databases.",
+   "databaseSchema": {
+     "title": "databaseSchema",
+     "description": "databaseSchema of the data source. This is an optional parameter; if you would like to restrict the metadata reading to a single databaseSchema, specify it here. When left blank, OpenMetadata Ingestion attempts to scan all the databaseSchemas.",
      "type": "string"
    },
    "connectionOptions": {

View File

@@ -48,9 +48,9 @@
      "description": "Host and port of the Oracle service.",
      "type": "string"
    },
-   "database": {
-     "title": "Database",
-     "description": "Database of the data source. This is optional parameter, if you would like to restrict the metadata reading to a single database. When left blank, OpenMetadata Ingestion attempts to scan all the databases.",
+   "databaseSchema": {
+     "title": "databaseSchema",
+     "description": "databaseSchema of the data source. This is an optional parameter; if you would like to restrict the metadata reading to a single databaseSchema, specify it here. When left blank, OpenMetadata Ingestion attempts to scan all the databaseSchemas.",
      "type": "string"
    },
    "oracleServiceName": {

View File

@@ -48,9 +48,9 @@
      "description": "Host and port of the Presto service.",
      "type": "string"
    },
-   "database": {
-     "title": "Database",
-     "description": "Database of the data source. This is optional parameter, if you would like to restrict the metadata reading to a single database. When left blank, OpenMetadata Ingestion attempts to scan all the databases.",
+   "databaseSchema": {
+     "title": "databaseSchema",
+     "description": "databaseSchema of the data source. This is an optional parameter; if you would like to restrict the metadata reading to a single databaseSchema, specify it here. When left blank, OpenMetadata Ingestion attempts to scan all the databaseSchemas.",
      "type": "string"
    },
    "catalog": {

View File

@@ -48,9 +48,9 @@
      "description": "Host and port of the SingleStore service.",
      "type": "string"
    },
-   "database": {
-     "title": "Database",
-     "description": "Database of the data source. This is optional parameter, if you would like to restrict the metadata reading to a single database. When left blank, OpenMetadata Ingestion attempts to scan all the databases.",
+   "databaseSchema": {
+     "title": "databaseSchema",
+     "description": "databaseSchema of the data source. This is an optional parameter; if you would like to restrict the metadata reading to a single databaseSchema, specify it here. When left blank, OpenMetadata Ingestion attempts to scan all the databaseSchemas.",
      "type": "string"
    },
    "connectionOptions": {

View File

@@ -53,9 +53,9 @@
      "description": "Catalog of the data source.",
      "type": "string"
    },
-   "database": {
-     "title": "Database",
-     "description": "Database of the data source. This is optional parameter, if you would like to restrict the metadata reading to a single database. When left blank, OpenMetadata Ingestion attempts to scan all the databases in the selected catalog.",
+   "databaseSchema": {
+     "title": "databaseSchema",
+     "description": "databaseSchema of the data source. This is an optional parameter; if you would like to restrict the metadata reading to a single databaseSchema, specify it here. When left blank, OpenMetadata Ingestion attempts to scan all the databaseSchemas.",
      "type": "string"
    },
    "proxies": {

View File

@@ -350,7 +350,7 @@ public class DatabaseServiceResourceTest extends EntityResourceTest<DatabaseServ
  public static void validateMysqlConnection(
      MysqlConnection expectedMysqlConnection, MysqlConnection actualMysqlConnection) {
-   assertEquals(expectedMysqlConnection.getDatabase(), actualMysqlConnection.getDatabase());
+   assertEquals(expectedMysqlConnection.getDatabaseSchema(), actualMysqlConnection.getDatabaseSchema());
    assertEquals(expectedMysqlConnection.getHostPort(), actualMysqlConnection.getHostPort());
    assertEquals(expectedMysqlConnection.getUsername(), actualMysqlConnection.getUsername());
    assertEquals(expectedMysqlConnection.getPassword(), actualMysqlConnection.getPassword());
@@ -363,7 +363,6 @@ public class DatabaseServiceResourceTest extends EntityResourceTest<DatabaseServ
    assertEquals(expectedBigQueryConnection.getHostPort(), actualBigQueryConnection.getHostPort());
    assertEquals(expectedBigQueryConnection.getCredentials(), actualBigQueryConnection.getCredentials());
    assertEquals(expectedBigQueryConnection.getScheme(), actualBigQueryConnection.getScheme());
-   assertEquals(expectedBigQueryConnection.getDatabase(), actualBigQueryConnection.getDatabase());
    assertEquals(
        expectedBigQueryConnection.getConnectionArguments(), actualBigQueryConnection.getConnectionArguments());
    assertEquals(expectedBigQueryConnection.getConnectionOptions(), actualBigQueryConnection.getConnectionOptions());

View File

@@ -12,6 +12,7 @@ import os
import traceback
from typing import Optional, Tuple

+ from google import auth
from google.cloud.datacatalog_v1 import PolicyTagManagerClient
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy_bigquery import _types
@@ -93,6 +94,7 @@ class BigquerySource(CommonDbSourceService):
            self.config.serviceConnection.__root__.config
        )
        self.temp_credentials = None
+       self.project_id = self.set_project_id()

    @classmethod
    def create(cls, config_dict, metadata_config: OpenMetadataConnection):
@@ -115,8 +117,11 @@ class BigquerySource(CommonDbSourceService):
            raise ValueError(f"schema {schema} does not match table {table}")
        return segments[0], segments[1]

+   def set_project_id(self):
+       _, project_id = auth.default()
+       return project_id
+
    def prepare(self):
-       self.service_connection.database = self.service_connection.projectId
        # and "policy_tags" in column and column["policy_tags"]
        try:
            if self.source_config.includeTags:
@@ -196,14 +201,12 @@ class BigquerySource(CommonDbSourceService):
            logger.debug(traceback.format_exc())
            logger.error(err)

-   def get_database_entity(self, database: Optional[str]) -> Database:
-       if not database:
-           database = (
-               self.connection_config.projectId
-               or self.connection_config.credentials.gcsConfig.projectId
-           )
+   def _get_database_name(self) -> str:
+       return self.project_id or self.connection_config.credentials.gcsConfig.projectId
+
+   def get_database_entity(self) -> Database:
        return Database(
-           name=database,
+           name=self._get_database_name(),
            service=EntityReference(
                id=self.service.id, type=self.service_connection.type.value
            ),
@@ -216,7 +219,7 @@ class BigquerySource(CommonDbSourceService):
        view_definition = ""
        try:
            view_definition = inspector.get_view_definition(
-               f"{self.service_connection.projectId}.{schema}.{table_name}"
+               f"{self.project_id}.{schema}.{table_name}"
            )
            view_definition = (
                "" if view_definition is None else str(view_definition)

View File

@@ -11,6 +11,7 @@
from ibm_db_sa.base import DB2Dialect
from sqlalchemy.engine import reflection
+ from sqlalchemy.engine.reflection import Inspector

from metadata.generated.schema.entity.services.connections.metadata.openMetadataConnection import (
    OpenMetadataConnection,
@@ -46,3 +47,10 @@ class Db2Source(CommonDbSourceService):
                f"Expected Db2Connection, but got {connection}"
            )
        return cls(config, metadata_config)
+
+   def get_schemas(self, inspector: Inspector) -> str:
+       return (
+           inspector.get_schema_names()
+           if not self.service_connection.databaseSchema
+           else [self.service_connection.databaseSchema]
+       )
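
The get_schemas override added here (and mirrored in the Hive, MariaDB, MySQL, Oracle, Presto, and Trino sources below) filters the SQLAlchemy inspector by the new field. A standalone sketch of the same logic; SQLite is used as a stand-in engine so the snippet runs without a DB2 dialect installed:

from sqlalchemy import create_engine, inspect

engine = create_engine("sqlite://")  # stand-in; any SQLAlchemy DSN works here
inspector = inspect(engine)

database_schema = None  # serviceConnection.databaseSchema; None means "scan everything"
schemas = (
    inspector.get_schema_names()
    if not database_schema
    else [database_schema]
)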

View File

@@ -13,6 +13,7 @@ import re
from pyhive.sqlalchemy_hive import HiveDialect, _type_map
from sqlalchemy import types, util
+ from sqlalchemy.engine.reflection import Inspector

from metadata.generated.schema.entity.services.connections.database.hiveConnection import (
    HiveConnection,
@@ -125,7 +126,6 @@ HiveDialect.get_view_names = get_view_names

class HiveSource(CommonDbSourceService):
    def prepare(self):
-       self.service_connection.database = "default"
        return super().prepare()

    @classmethod
@@ -137,3 +137,10 @@ class HiveSource(CommonDbSourceService):
                f"Expected HiveConnection, but got {connection}"
            )
        return cls(config, metadata_config)
+
+   def get_schemas(self, inspector: Inspector) -> str:
+       return (
+           inspector.get_schema_names()
+           if not self.service_connection.databaseSchema
+           else [self.service_connection.databaseSchema]
+       )

View File

@@ -43,6 +43,6 @@ class MariadbSource(CommonDbSourceService):
    def get_schemas(self, inspector: Inspector) -> str:
        return (
            inspector.get_schema_names()
-           if not self.service_connection.database
-           else [self.service_connection.database]
+           if not self.service_connection.databaseSchema
+           else [self.service_connection.databaseSchema]
        )

View File

@@ -42,6 +42,6 @@ class MysqlSource(CommonDbSourceService):
    def get_schemas(self, inspector: Inspector) -> str:
        return (
            inspector.get_schema_names()
-           if not self.service_connection.database
-           else [self.service_connection.database]
+           if not self.service_connection.databaseSchema
+           else [self.service_connection.databaseSchema]
        )

View File

@@ -10,7 +10,8 @@
# limitations under the License.

# This import verifies that the dependencies are available.
from typing import Optional

+ from sqlalchemy.engine.reflection import Inspector

from metadata.generated.schema.entity.data.database import Database
from metadata.generated.schema.entity.services.connections.database.oracleConnection import (
@@ -45,12 +46,9 @@ class OracleSource(CommonDbSourceService):
            )
        return cls(config, metadata_config)

-   def get_database_entity(self, database: Optional[str]) -> Database:
-       if not database:
-           database = self.service_connection.oracleServiceName
-       return Database(
-           name=database,
-           service=EntityReference(
-               id=self.service.id, type=self.service_connection.type.value
-           ),
-       )
+   def get_schemas(self, inspector: Inspector) -> str:
+       return (
+           inspector.get_schema_names()
+           if not self.service_connection.databaseSchema
+           else [self.service_connection.databaseSchema]
+       )

View File

@@ -79,9 +79,7 @@ class PostgresSource(CommonDbSourceService):
        except Exception as err:
            logger.error(f"Failed to Connect: {row[0]} due to error {err}")

-   def get_database_entity(self, database: str) -> Database:
-       if database:
-           self.service_connection.database = database
+   def get_database_entity(self) -> Database:
        return Database(
            name=self.service_connection.database,
            service=EntityReference(id=self.service.id, type="database"),

View File

@@ -97,7 +97,7 @@ class PrestoSource(CommonDbSourceService):
            )
        return cls(config, metadata_config)

-   def get_database_entity(self, _) -> Database:
+   def get_database_entity(self) -> Database:
        return Database(
            name=self.service_connection.catalog,
            service=EntityReference(
@@ -108,8 +108,8 @@ class PrestoSource(CommonDbSourceService):
    def get_schemas(self, inspector: Inspector) -> str:
        return (
            inspector.get_schema_names()
-           if not self.service_connection.database
-           else [self.service_connection.database]
+           if not self.service_connection.databaseSchema
+           else [self.service_connection.databaseSchema]
        )

    def prepare(self):

View File

@@ -225,7 +225,7 @@ class SnowflakeSource(CommonDbSourceService):
            if self.source_config.enableDataProfiler:
                profile = self.run_profiler(table=table_entity, schema=schema)
                table_entity.tableProfile = [profile] if profile else None
-           database = self.get_database_entity(self.service_connection.database)
+           database = self.get_database_entity()
            table_schema_and_db = OMetaDatabaseAndTable(
                table=table_entity,
                database=database,

View File

@@ -64,7 +64,7 @@ class TrinoSource(CommonDbSourceService):
            )
        return cls(config, metadata_config)

-   def get_database_entity(self, _) -> Database:
+   def get_database_entity(self) -> Database:
        return Database(
            name=self.trino_connection.catalog,
            service=EntityReference(
@@ -75,6 +75,6 @@ class TrinoSource(CommonDbSourceService):
    def get_schemas(self, inspector: Inspector) -> str:
        return (
            inspector.get_schema_names()
-           if not self.service_connection.database
-           else [self.service_connection.database]
+           if not self.service_connection.databaseSchema
+           else [self.service_connection.databaseSchema]
        )

View File

@@ -100,6 +100,9 @@ def get_connection_url_common(connection):
    if hasattr(connection, "database"):
        url += f"/{connection.database}" if connection.database else ""
+   elif hasattr(connection, "databaseSchema"):
+       url += f"/{connection.databaseSchema}" if connection.databaseSchema else ""

    options = (
        connection.connectionOptions.dict()
        if connection.connectionOptions
@@ -185,8 +188,6 @@ def _(connection: TrinoConnection):
@get_connection_url.register
def _(connection: DatabricksConnection):
    url = f"{connection.scheme.value}://token:{connection.token.get_secret_value()}@{connection.hostPort}"
-   if connection.database:
-       url += f"/{connection.database}"
    return url
@@ -200,8 +201,8 @@ def _(connection: PrestoConnection):
        url += "@"
    url += f"{connection.hostPort}"
    url += f"/{connection.catalog}"
-   if connection.database:
-       url += f"?schema={quote_plus(connection.database)}"
+   if connection.databaseSchema:
+       url += f"?schema={quote_plus(connection.databaseSchema)}"
    return url
@@ -280,7 +281,7 @@ def _(connection: HiveConnection):
        url += "@"
    url += connection.hostPort
-   url += f"/{connection.database}" if connection.database else ""
+   url += f"/{connection.databaseSchema}" if connection.databaseSchema else ""

    options = (
        connection.connectionOptions.dict()
@@ -288,8 +289,9 @@ def _(connection: HiveConnection):
        else connection.connectionOptions
    )
    if options:
-       if not connection.database:
+       if not connection.databaseSchema:
            url += "/"
        params = "&".join(
            f"{key}={quote_plus(value)}" for (key, value) in options.items() if value
        )
@@ -301,7 +303,9 @@
@get_connection_url.register
def _(connection: BigQueryConnection):
-   project_id = connection.projectId
+   from google import auth
+
+   _, project_id = auth.default()
    if not project_id and isinstance(connection.credentials.gcsConfig, GCSValues):
        project_id = connection.credentials.gcsConfig.projectId
    if project_id:
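
The common URL builder now falls back from database to databaseSchema. A simplified sketch of the resulting behaviour (build_url is a standalone helper written for illustration only; the real function also appends connection options):

def build_url(scheme: str, host_port: str, database=None, database_schema=None) -> str:
    # Mirrors get_connection_url_common: prefer `database`, else `databaseSchema`.
    url = f"{scheme}://{host_port}"
    if database:
        url += f"/{database}"
    elif database_schema:
        url += f"/{database_schema}"
    return url

assert build_url(
    "mysql+pymysql", "openmetadata_user:@localhost:3306", database_schema="openmetadata_db"
) == "mysql+pymysql://openmetadata_user:@localhost:3306/openmetadata_db"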

View File

@@ -63,7 +63,7 @@ CONFIG = """
    }
  }
"""

+ MOCK_GET_SOURCE_CONNECTION = "XXXXX-XXXX-XXXXX"
MOCK_GET_TABLE_NAMES = ["test_schema_1.test_table_1", "test_schema_1.test_table_2"]
GET_TABLE_DESCRIPTIONS = {"text": "Test"}
@@ -232,12 +232,14 @@ class BigQueryIngestionTest(TestCase):
    @patch("sqlalchemy.engine.reflection.Inspector.get_unique_constraints")
    @patch("sqlalchemy.engine.reflection.Inspector.get_pk_constraint")
    @patch("sqlalchemy.engine.reflection.Inspector.get_columns")
+   @patch("google.auth.default")
    @patch("sqlalchemy.engine.base.Engine.connect")
    @patch("sqlalchemy_bigquery._helpers.create_bigquery_client")
    def test_bigquery_ingestion(
        self,
        mock_connect,
        mock_create_bigquery_client,
+       auth_default,
        get_columns,
        get_pk_constraint,
        get_unique_constraints,
@@ -258,6 +260,7 @@ class BigQueryIngestionTest(TestCase):
        get_view_names.return_value = MOCK_GET_VIEW_NAMES
        get_view_definition.return_value = MOCK_GET_VIEW_DEFINITION
        fetch_sample_data.return_value = []
+       auth_default.return_value = (None, MOCK_GET_SOURCE_CONNECTION)
        get_indexes.return_value = MOCK_GET_INDEXES

        execute_workflow(json.loads(CONFIG))
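
Because the source now calls google.auth.default() at construction time, the test patches it so no real GCP credentials are needed. A minimal sketch of that seam in isolation:

from unittest.mock import patch

with patch("google.auth.default") as auth_default:
    # Return a (credentials, project_id) tuple, matching the real signature.
    auth_default.return_value = (None, "XXXXX-XXXX-XXXXX")
    from google import auth
    assert auth.default()[1] == "XXXXX-XXXX-XXXXX"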

View File

@@ -44,7 +44,6 @@ CONFIG = """
      "username": "openmetadata_user",
      "password": "openmetadata_password",
      "hostPort": "localhost:3306",
-     "database": "openmetadata_db",
      "connectionOptions": {},
      "connectionArguments": {}
    }

View File

@@ -234,7 +234,6 @@ def test_databricks():
            "config": {
                "token": "<databricks token>",
                "hostPort": "localhost:443",
-               "database": "default",
                "connectionArguments": {
                    "http_path": "<http path of databricks cluster>"
                },
@@ -257,7 +256,6 @@ def test_db2():
                "username": "openmetadata_user",
                "password": "openmetadata_password",
                "hostPort": "localhost:50000",
-               "database": "custom_database_name",
            }
        },
        "sourceConfig": {"config": {"enableDataProfiler": False}},
@@ -575,7 +573,6 @@ def test_presto():
                "catalog": "tpcds",
                "username": "admin",
                "password": "password",
-               "database": "database",
            }
        },
        "sourceConfig": {"config": {"generateSampleData": False}},
@@ -685,7 +682,6 @@ def test_singlestore():
                "username": "openmetadata_user",
                "password": "openmetadata_password",
                "hostPort": "localhost:3306",
-               "database": "custom_database_name",
            }
        },
        "sourceConfig": {"config": {"enableDataProfiler": False}},
@@ -797,7 +793,6 @@ def test_trino():
                "hostPort": "localhost:8080",
                "username": "user",
                "catalog": "tpcds",
-               "database": "tiny",
            }
        },
        "sourceConfig": {"config": {}},

View File

@@ -98,41 +98,38 @@ class SouceConnectionTest(TestCase):
    def test_databricks_url_with_db(self):
        expected_result = (
-           "databricks+connector://token:KlivDTACWXKmZVfN1qIM@1.1.1.1:443/default"
+           "databricks+connector://token:KlivDTACWXKmZVfN1qIM@1.1.1.1:443"
        )
        databricks_conn_obj = DatabricksConnection(
            scheme=DatabricksScheme.databricks_connector,
            hostPort="1.1.1.1:443",
            token="KlivDTACWXKmZVfN1qIM",
-           database="default",
        )
        assert expected_result == get_connection_url(databricks_conn_obj)

    def test_hive_url(self):
-       expected_result = "hive://localhost:10000/default"
+       expected_result = "hive://localhost:10000"
        hive_conn_obj = HiveConnection(
-           scheme=HiveScheme.hive, hostPort="localhost:10000", database="default"
+           scheme=HiveScheme.hive, hostPort="localhost:10000"
        )
        assert expected_result == get_connection_url(hive_conn_obj)

    def test_hive_url_custom_auth(self):
-       expected_result = "hive://username:password@localhost:10000/default"
+       expected_result = "hive://username:password@localhost:10000"
        hive_conn_obj = HiveConnection(
            scheme=HiveScheme.hive.value,
            username="username",
            password="password",
            hostPort="localhost:10000",
-           database="default",
            connectionArguments={"auth": "CUSTOM"},
        )
        assert expected_result == get_connection_url(hive_conn_obj)

    def test_hive_url_with_kerberos_auth(self):
-       expected_result = "hive://localhost:10000/default"
+       expected_result = "hive://localhost:10000"
        hive_conn_obj = HiveConnection(
            scheme=HiveScheme.hive.value,
            hostPort="localhost:10000",
-           database="default",
            connectionArguments={
                "auth": "KERBEROS",
                "kerberos_service_name": "hive",
@@ -142,13 +139,12 @@ class SouceConnectionTest(TestCase):
        assert expected_result == get_connection_url(hive_conn_obj)

    def test_hive_url_with_ldap_auth(self):
-       expected_result = "hive://username:password@localhost:10000/default"
+       expected_result = "hive://username:password@localhost:10000"
        hive_conn_obj = HiveConnection(
            scheme=HiveScheme.hive.value,
            username="username",
            password="password",
            hostPort="localhost:10000",
-           database="default",
            connectionArguments={"auth": "LDAP"},
        )
        assert expected_result == get_connection_url(hive_conn_obj)
@@ -172,7 +168,6 @@ class SouceConnectionTest(TestCase):
            password=None,
            hostPort="localhost:443",
            catalog="tpcds",
-           database="tiny",
            connectionArguments=None,
            scheme=TrinoScheme.trino,
        )
@@ -185,7 +180,6 @@ class SouceConnectionTest(TestCase):
            password=None,
            hostPort="localhost:443",
            catalog="tpcds",
-           database="tiny",
            connectionArguments={"user": "user-to-be-impersonated"},
            scheme=TrinoScheme.trino,
        )
@@ -198,7 +192,6 @@ class SouceConnectionTest(TestCase):
            password=None,
            hostPort="localhost:443",
            catalog="tpcds",
-           database="tiny",
            connectionArguments=None,
            proxies={"http": "foo.bar:3128", "http://host.name": "foo.bar:4012"},
            scheme=TrinoScheme.trino,
@@ -215,7 +208,6 @@ class SouceConnectionTest(TestCase):
            password=None,
            hostPort="localhost:443",
            catalog="tpcds",
-           database="tiny",
            connectionArguments={"user": "user-to-be-impersonated"},
            proxies={"http": "foo.bar:3128", "http://host.name": "foo.bar:4012"},
            scheme=TrinoScheme.trino,
@@ -290,17 +282,15 @@ class SouceConnectionTest(TestCase):
            username="openmetadata_user",
            hostPort="localhost:3306",
            scheme=MySQLScheme.mysql_pymysql,
-           database=None,
        )
        assert expected_url == get_connection_url(mysql_conn_obj)

        # connection arguments with db
-       expected_url = "mysql+pymysql://openmetadata_user:@localhost:3306/default"
+       expected_url = "mysql+pymysql://openmetadata_user:@localhost:3306"
        mysql_conn_obj = MysqlConnection(
            username="openmetadata_user",
            hostPort="localhost:3306",
            scheme=MySQLScheme.mysql_pymysql,
-           database="default",
        )
        assert expected_url == get_connection_url(mysql_conn_obj)
@@ -332,17 +322,15 @@ class SouceConnectionTest(TestCase):
            username="openmetadata_user",
            hostPort="localhost:3306",
            scheme=MariaDBScheme.mysql_pymysql,
-           database=None,
        )
        assert expected_url == get_connection_url(mariadb_conn_obj)

        # connection arguments with db
-       expected_url = "mysql+pymysql://openmetadata_user:@localhost:3306/default"
+       expected_url = "mysql+pymysql://openmetadata_user:@localhost:3306"
        mariadb_conn_obj = MariaDBConnection(
            username="openmetadata_user",
            hostPort="localhost:3306",
            scheme=MariaDBScheme.mysql_pymysql,
-           database="default",
        )
        assert expected_url == get_connection_url(mariadb_conn_obj)
@@ -386,17 +374,15 @@ class SouceConnectionTest(TestCase):
            username="openmetadata_user",
            hostPort="localhost:5432",
            scheme=SingleStoreScheme.mysql_pymysql,
-           database=None,
        )
        assert expected_url == get_connection_url(singleStore_conn_obj)

        # connection arguments with db
-       expected_url = "mysql+pymysql://openmetadata_user:@localhost:5432/default"
+       expected_url = "mysql+pymysql://openmetadata_user:@localhost:5432"
        singleStore_conn_obj = SingleStoreConnection(
            username="openmetadata_user",
            hostPort="localhost:5432",
            scheme=SingleStoreScheme.mysql_pymysql,
-           database="default",
        )
        assert expected_url == get_connection_url(singleStore_conn_obj)
@@ -407,17 +393,15 @@ class SouceConnectionTest(TestCase):
            scheme=Db2Scheme.db2_ibm_db,
            username="openmetadata_user",
            hostPort="localhost:50000",
-           database=None,
        )
        assert expected_url == get_connection_url(db2_conn_obj)

        # connection arguments with db
-       expected_url = "db2+ibm_db://openmetadata_user:@localhost:50000/default"
+       expected_url = "db2+ibm_db://openmetadata_user:@localhost:50000"
        db2_conn_obj = Db2Connection(
            username="openmetadata_user",
            hostPort="localhost:50000",
            scheme=Db2Scheme.db2_ibm_db,
-           database="default",
        )
        assert expected_url == get_connection_url(db2_conn_obj)
@@ -452,7 +436,6 @@ class SouceConnectionTest(TestCase):
            username="user",
            password=None,
            hostPort="localhost:443",
-           database=None,
            connectionArguments=None,
            scheme=MySQLScheme.mysql_pymysql,
        )
@@ -464,7 +447,6 @@ class SouceConnectionTest(TestCase):
            username="user",
            password=None,
            hostPort="localhost:443",
-           database="tiny",
            connectionArguments={"user": "user-to-be-impersonated"},
            scheme=MySQLScheme.mysql_pymysql,
        )
@@ -502,7 +484,6 @@ class SouceConnectionTest(TestCase):
            username="user",
            password=None,
            hostPort="localhost:443",
-           database=None,
            connectionArguments=None,
            scheme=MariaDBScheme.mysql_pymysql,
        )
@@ -514,7 +495,6 @@ class SouceConnectionTest(TestCase):
            username="user",
            password=None,
            hostPort="localhost:443",
-           database="tiny",
            connectionArguments={"user": "user-to-be-impersonated"},
            scheme=MariaDBScheme.mysql_pymysql,
        )
@@ -577,7 +557,6 @@ class SouceConnectionTest(TestCase):
            username="user",
            password=None,
            hostPort="localhost:443",
-           database="tiny",
            connectionArguments=None,
            scheme=SingleStoreScheme.mysql_pymysql,
        )
@@ -589,7 +568,6 @@ class SouceConnectionTest(TestCase):
            username="user",
            password=None,
            hostPort="localhost:443",
-           database="tiny",
            connectionArguments={"user": "user-to-be-impersonated"},
            scheme=SingleStoreScheme.mysql_pymysql,
        )
@@ -602,7 +580,6 @@ class SouceConnectionTest(TestCase):
            username="user",
            password=None,
            hostPort="localhost:443",
-           database="tiny",
            connectionArguments=None,
            scheme=Db2Scheme.db2_ibm_db,
        )
@@ -614,7 +591,6 @@ class SouceConnectionTest(TestCase):
            username="user",
            password=None,
            hostPort="localhost:443",
-           database="tiny",
            connectionArguments={"user": "user-to-be-impersonated"},
            scheme=Db2Scheme.db2_ibm_db,
        )

View File

@@ -48,11 +48,6 @@
      "description": "Host and port of the Vertica service.",
      "type": "string"
    },
-   "database": {
-     "title": "Database",
-     "description": "Database of the data source. This is optional parameter, if you would like to restrict the metadata reading to a single database. When left blank, OpenMetadata Ingestion attempts to scan all the databases.",
-     "type": "string"
-   },
    "connectionOptions": {
      "title": "Connection Options",
      "$ref": "../connectionBasicType.json#/definitions/connectionOptions"