Fix Py tests (#4122)

Pere Miquel Brull 2022-04-14 11:22:39 +02:00 committed by GitHub
parent 735cc54614
commit 6768bf708d
17 changed files with 78 additions and 77 deletions


@ -9,8 +9,8 @@
"clickhouseType": {
"description": "Service type.",
"type": "string",
"enum": ["ClickHouse"],
"default": "ClickHouse"
"enum": ["Clickhouse"],
"default": "Clickhouse"
},
"clickhouseScheme": {
"description": "SQLAlchemy driver scheme options.",
@ -23,7 +23,7 @@
"type": {
"description": "Service Type",
"$ref": "#/definitions/clickhouseType",
"default": "ClickHouse"
"default": "Clickhouse"
},
"scheme": {
"description": "SQLAlchemy driver scheme options.",


@ -9,21 +9,21 @@
"mssqlType": {
"description": "Service type.",
"type": "string",
"enum": ["MSSQL"],
"default": "MSSQL"
"enum": ["Mssql"],
"default": "Mssql"
},
"mssqlScheme": {
"description": "SQLAlchemy driver scheme options.",
"type": "string",
"enum": ["mssql+pyodbc", "mssql+pytds", "mssql+pymssql"],
"default": "mysql+pymysql"
"default": "mssql+pytds"
}
},
"properties": {
"type": {
"description": "Service Type",
"$ref": "#/definitions/mssqlType",
"default": "MSSQL"
"default": "Mssql"
},
"scheme": {
"description": "SQLAlchemy driver scheme options.",
@ -61,5 +61,9 @@
"$ref": "../connectionBasicType.json#/definitions/supportsMetadataExtraction"
}
},
"additionalProperties": false
"additionalProperties": false,
"required": [
"hostPort",
"username"
]
}
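Beyond the casing rename, this schema fixes the default scheme (the previous mysql+pymysql default was not even in the allowed MSSQL scheme list) and now marks hostPort and username as required. A hypothetical payload satisfying the updated schema; host and credentials below are placeholders, not values from this commit:

mssql_connection = {
    "type": "Mssql",
    "scheme": "mssql+pytds",       # corrected default driver scheme
    "hostPort": "localhost:1433",  # now required
    "username": "sa",              # now required
}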


@ -14,7 +14,7 @@
"Redshift",
"Snowflake",
"Postgres",
"MSSQL",
"Mssql",
"Hive",
"Oracle",
"Athena",
@ -25,7 +25,7 @@
"MariaDB",
"Druid",
"Db2",
"ClickHouse",
"Clickhouse",
"Databricks",
"DynamoDB",
"AzureSQL",
@ -52,7 +52,7 @@
"name": "Postgres"
},
{
"name": "MSSQL"
"name": "Mssql"
},
{
"name": "Oracle"
@ -85,7 +85,7 @@
"name": "Db2"
},
{
"name": "ClickHouse"
"name": "Clickhouse"
},
{
"name": "Databricks"


@ -292,7 +292,7 @@ class AmundsenSource(Source[Entity]):
"name": service_name,
"description": "",
"serviceType": self.database_service_map.get(
- service_type.lower(), DatabaseServiceType.MySQL.value
+ service_type.lower(), DatabaseServiceType.Mysql.value
),
"connection": {"config": {}},
}
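Only the fallback value changes here. A reduced sketch of the lookup, with a single illustrative entry standing in for the real database_service_map and DatabaseServiceType taken from the module's existing imports:

database_service_map = {"hive": "Hive"}  # illustrative entry; the real map is larger

service_type = "not-in-the-map"
resolved = database_service_map.get(service_type.lower(), DatabaseServiceType.Mysql.value)
assert resolved == "Mysql"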


@ -64,7 +64,7 @@ class MssqlUsageSource(Source[TableQuery]):
self.alchemy_helper = SQLAlchemyHelper(
self.connection,
metadata_config,
- DatabaseServiceType.MSSQL.value,
+ DatabaseServiceType.Mssql.value,
self.sql_stmt,
)
self.report = SQLSourceStatus()


@ -17,7 +17,6 @@ import psycopg2
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.inspection import inspect
from metadata.config.common import FQDN_SEPARATOR
- from metadata.generated.schema.entity.data.database import Database
from metadata.generated.schema.entity.services.connections.database.postgresConnection import (
PostgresConnection,


@ -64,7 +64,7 @@ class AirflowLineageTest(TestCase):
service = CreateDatabaseServiceRequest(
name="test-service-table-lineage",
- serviceType=DatabaseServiceType.MySQL,
+ serviceType=DatabaseServiceType.Mysql,
connection=DatabaseConnection(
config=MysqlConnection(
username="username",
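The same one-line rename repeats in most of the test fixtures below. For context, a sketch of the full request this test builds, reusing the module's existing imports; the fields after username are placeholders assumed from the MysqlConnection model:

service = CreateDatabaseServiceRequest(
    name="test-service-table-lineage",
    serviceType=DatabaseServiceType.Mysql,  # formerly DatabaseServiceType.MySQL
    connection=DatabaseConnection(
        config=MysqlConnection(
            username="username",
            password="password",        # placeholder
            hostPort="localhost:3306",  # placeholder
        )
    ),
)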


@ -56,7 +56,7 @@ class OMetaDatabaseTest(TestCase):
service = CreateDatabaseServiceRequest(
name="test-service-db",
- serviceType=DatabaseServiceType.MySQL,
+ serviceType=DatabaseServiceType.Mysql,
connection=DatabaseConnection(
config=MysqlConnection(
username="username",


@ -58,14 +58,14 @@ class OMetaDatabaseServiceTest(TestCase):
cls.entity = DatabaseService(
id=uuid.uuid4(),
name="test-db-service",
- serviceType=DatabaseServiceType.MySQL,
+ serviceType=DatabaseServiceType.Mysql,
connection=cls.connection,
href="http://resource-uri/", # Dummy value, this is auto-generated by OM
)
cls.create = CreateDatabaseServiceRequest(
name="test-db-service",
- serviceType=DatabaseServiceType.MySQL,
+ serviceType=DatabaseServiceType.Mysql,
connection=cls.connection,
)
@ -117,7 +117,7 @@ class OMetaDatabaseServiceTest(TestCase):
update_request = CreateDatabaseServiceRequest(
name="test-db-service",
- serviceType=DatabaseServiceType.MySQL,
+ serviceType=DatabaseServiceType.Mysql,
connection=new_connection,
)


@ -63,7 +63,7 @@ class OMetaLineageTest(TestCase):
db_service = CreateDatabaseServiceRequest(
name="test-service-db-lineage",
- serviceType=DatabaseServiceType.MySQL,
+ serviceType=DatabaseServiceType.Mysql,
connection=DatabaseConnection(
config=MysqlConnection(
username="username",


@ -199,7 +199,7 @@ class OMetaModelTest(TestCase):
service = CreateDatabaseServiceRequest(
name="test-service-table-ml",
- serviceType=DatabaseServiceType.MySQL,
+ serviceType=DatabaseServiceType.Mysql,
connection=DatabaseConnection(
config=MysqlConnection(
username="username",


@ -57,7 +57,7 @@ class OMetaServiceTest(TestCase):
"serviceName": "local_mysql",
"serviceConnection": {
"config": {
"type": "MySQL",
"type": "Mysql",
"username": "openmetadata_user",
"password": "openmetadata_password",
"hostPort": "random:3306",
@ -73,7 +73,7 @@ class OMetaServiceTest(TestCase):
entity=DatabaseService, config=workflow_source
)
assert service
- assert service.serviceType == DatabaseServiceType.MySQL
+ assert service.serviceType == DatabaseServiceType.Mysql
# Check get
assert service == self.metadata.get_service_or_create(
@ -92,10 +92,10 @@ class OMetaServiceTest(TestCase):
"serviceName": "local_mssql",
"serviceConnection": {
"config": {
"type": "MSSQL",
"type": "Mssql",
"username": "openmetadata_user",
"password": "openmetadata_password",
"hostPort": "random:3306",
"hostPort": "random:1433",
}
},
"sourceConfig": {"config": {"enableDataProfiler": False}},
@ -108,7 +108,7 @@ class OMetaServiceTest(TestCase):
entity=DatabaseService, config=workflow_source
)
assert service
- assert service.serviceType == DatabaseServiceType.MSSQL
+ assert service.serviceType == DatabaseServiceType.Mssql
# Check get
assert service == self.metadata.get_service_or_create(
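Two fixes land in the MSSQL branch of this test: the type casing and the port, since SQL Server listens on 1433 by default while 3306 is MySQL's port. Condensed from the hunks above, the corrected source definition and the check it feeds look roughly like this:

mssql_source = {
    "type": "mssql",
    "serviceName": "local_mssql",
    "serviceConnection": {
        "config": {
            "type": "Mssql",
            "username": "openmetadata_user",
            "password": "openmetadata_password",
            "hostPort": "random:1433",  # SQL Server default port, not MySQL's 3306
        }
    },
    "sourceConfig": {"config": {"enableDataProfiler": False}},
}
# After get_service_or_create(entity=DatabaseService, config=workflow_source):
# assert service.serviceType == DatabaseServiceType.Mssql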


@ -86,7 +86,7 @@ class OMetaTableTest(TestCase):
service = CreateDatabaseServiceRequest(
name="test-service-table",
- serviceType=DatabaseServiceType.MySQL,
+ serviceType=DatabaseServiceType.Mysql,
connection=DatabaseConnection(
config=MysqlConnection(
username="username",


@ -262,16 +262,13 @@ class BigQueryIngestionTest(TestCase):
execute_workflow(json.loads(CONFIG))
def test_file_sink(self):
config = json.loads(CONFIG)
file_data = open(config["sink"]["config"]["filename"])
file_sink = json.load(file_data)
for ometa_data in file_sink:
table = ometa_data.get("table")
- omdtable_obj: OMetaDatabaseAndTable = OMetaDatabaseAndTable.parse_obj(
-     ometa_data
- )
- table_obj: Table = Table.parse_obj(table)
+ _: OMetaDatabaseAndTable = OMetaDatabaseAndTable.parse_obj(ometa_data)
+ _: Table = Table.parse_obj(table)
assert table.get("description") == GET_TABLE_DESCRIPTIONS.get("text")
table_name = (
@ -281,7 +278,7 @@ class BigQueryIngestionTest(TestCase):
assert table_name in MOCK_GET_TABLE_NAMES
for column in table.get("columns"):
- column_obj: Column = Column.parse_obj(column)
+ _: Column = Column.parse_obj(column)
if column in MOCK_UNIQUE_CONSTRAINTS:
assert Column.constraint.UNIQUE == column.get("constraint")
if column in MOCK_PK_CONSTRAINT.get("constrained_columns"):
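The omdtable_obj / table_obj bindings were never used, so they are collapsed into throwaway _ assignments; the same cleanup appears in the ClickHouse, MySQL and Vertica tests below. A compact sketch of the resulting check. The context manager is this sketch's own addition (the test keeps a bare open()), and OMetaDatabaseAndTable, Table and GET_TABLE_DESCRIPTIONS come from the test module:

import json

def check_file_sink(config: dict) -> None:
    # Every record the file sink wrote must still round-trip through the
    # generated models; the parsed objects themselves are not kept.
    with open(config["sink"]["config"]["filename"]) as file_data:
        for ometa_data in json.load(file_data):
            table = ometa_data.get("table")
            _ = OMetaDatabaseAndTable.parse_obj(ometa_data)
            _ = Table.parse_obj(table)
            assert table.get("description") == GET_TABLE_DESCRIPTIONS.get("text")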


@ -39,9 +39,9 @@ CONFIG = """
"serviceName": "local_clickhouse",
"serviceConnection": {
"config": {
"type": "ClickHouse",
"username":"default",
"password":"",
"type": "Clickhouse",
"username": "default",
"password": "",
"hostPort": "localhost:8123",
"database": "default"
}
@ -213,14 +213,13 @@ class ClickhouseIngestionTest(TestCase):
execute_workflow()
def test_file_sink(self):
config = json.loads(CONFIG)
file_data = open(config["sink"]["config"]["filename"])
data = json.load(file_data)
for i in data:
table = i.get("table")
- omdtable_obj: OMetaDatabaseAndTable = OMetaDatabaseAndTable.parse_obj(i)
- table_obj: Table = Table.parse_obj(table)
+ _: OMetaDatabaseAndTable = OMetaDatabaseAndTable.parse_obj(i)
+ _: Table = Table.parse_obj(table)
assert table.get("description") == GET_TABLE_DESCRIPTIONS.get("text")
@ -228,7 +227,7 @@ class ClickhouseIngestionTest(TestCase):
assert table.get("name") in MOCK_GET_TABLE_NAMES
for column in table.get("columns"):
- column_obj: Column = Column.parse_obj(column)
+ _: Column = Column.parse_obj(column)
if column in MOCK_UNIQUE_CONSTRAINTS[0].get("column_names"):
assert Column.constraint.UNIQUE == column.get("constraint")
if column in MOCK_PK_CONSTRAINT.get("constrained_columns"):


@ -35,39 +35,43 @@ from metadata.ingestion.models.ometa_table_db import OMetaDatabaseAndTable
CONFIG = """
{
"source": {
"type": "mysql",
"serviceName": "local_mysql",
"serviceConnection": {
"config": {
"type": "MySQL",
"username": "openmetadata_user",
"password": "openmetadata_password",
"hostPort": "localhost:3306",
"database": "openmetadata_db"
}
"source": {
"type": "mysql",
"serviceName": "local_mysql",
"serviceConnection": {
"config": {
"type": "Mysql",
"username": "openmetadata_user",
"password": "openmetadata_password",
"hostPort": "localhost:3306",
"database": "openmetadata_db"
}
},
"sourceConfig": {
"config": {
"enableDataProfiler": false,
"schemaFilterPattern": {
"excludes": [
"system.*",
"information_schema.*",
"INFORMATION_SCHEMA.*"
]
}
}
}
},
"sourceConfig": {
"sink": {
"type": "file",
"config": {
"enableDataProfiler": false,
"schemaFilterPattern":{
"excludes": ["system.*","information_schema.*","INFORMATION_SCHEMA.*"]
"filename": "/var/tmp/datasets.json"
}
}
}
},
"sink": {
"type": "file",
"config": {
"filename": "/var/tmp/datasets.json"
"workflowConfig": {
"openMetadataServerConfig": {
"hostPort": "http://localhost:8585/api",
"authProvider": "no-auth"
}
}
},
"workflowConfig": {
"openMetadataServerConfig": {
"hostPort": "http://localhost:8585/api",
"authProvider": "no-auth"
}
}
}
"""
@ -278,14 +282,13 @@ class MySqlIngestionTest(TestCase):
execute_workflow()
def test_file_sink(self):
config = json.loads(CONFIG)
file_data = open(config["sink"]["config"]["filename"])
data = json.load(file_data)
for i in data:
table = i.get("table")
- omdtable_obj: OMetaDatabaseAndTable = OMetaDatabaseAndTable.parse_obj(i)
- table_obj: Table = Table.parse_obj(table)
+ _: OMetaDatabaseAndTable = OMetaDatabaseAndTable.parse_obj(i)
+ _: Table = Table.parse_obj(table)
assert table.get("description") == GET_TABLE_DESCRIPTIONS.get("text")
@ -293,7 +296,7 @@ class MySqlIngestionTest(TestCase):
assert table.get("name") in MOCK_GET_TABLE_NAMES
for column in table.get("columns"):
- column_obj: Column = Column.parse_obj(column)
+ _: Column = Column.parse_obj(column)
if column in MOCK_UNIQUE_CONSTRAINTS[0].get("column_names"):
assert Column.constraint.UNIQUE == column.get("constraint")
if column in MOCK_PK_CONSTRAINT.get("constrained_columns"):


@ -215,14 +215,13 @@ class VerticaIngestionTest(TestCase):
get_view_definition.return_value = MOCK_GET_VIEW_DEFINITION
execute_workflow()
def test_file_sink(self):
config = json.loads(CONFIG)
file_data = open(config["sink"]["config"]["filename"])
data = json.load(file_data)
for i in data:
table = i.get("table")
- omdtable_obj: OMetaDatabaseAndTable = OMetaDatabaseAndTable.parse_obj(i)
- table_obj: Table = Table.parse_obj(table)
+ _: OMetaDatabaseAndTable = OMetaDatabaseAndTable.parse_obj(i)
+ _: Table = Table.parse_obj(table)
assert table.get("description") == GET_TABLE_DESCRIPTIONS.get("text")
@ -230,7 +229,7 @@ class VerticaIngestionTest(TestCase):
assert table.get("name") in MOCK_GET_TABLE_NAMES
for column in table.get("columns"):
- column_obj: Column = Column.parse_obj(column)
+ _: Column = Column.parse_obj(column)
if column in MOCK_UNIQUE_CONSTRAINTS[0].get("column_names"):
assert Column.constraint.UNIQUE == column.get("constraint")
if column in MOCK_PK_CONSTRAINT.get("constrained_columns"):