Feat - Add import/export custom property for other entities - databaseService, database, databaseSchema level (#18385)

* Feat - Add import/export custom property for other entities - databaseService, database, databaseSchema level

* Fix java checkstyle

* Fix dbService level export

* minor: improve the inline edit of custom property

* minor: improve the number type icon

---------

Co-authored-by: Sachin Chaurasiya <sachinchaurasiyachotey87@gmail.com>
sonika-shah 2024-10-30 16:32:47 +05:30 committed by GitHub
parent 57c22b5fbe
commit b42e211d29
13 changed files with 188 additions and 18 deletions

View File

@@ -13,6 +13,7 @@
package org.openmetadata.service.jdbi3;
+import static org.openmetadata.csv.CsvUtil.addExtension;
import static org.openmetadata.csv.CsvUtil.addField;
import static org.openmetadata.csv.CsvUtil.addGlossaryTerms;
import static org.openmetadata.csv.CsvUtil.addOwners;
@@ -68,6 +69,7 @@ public class DatabaseRepository extends EntityRepository<Database> {
        "",
        "");
    supportsSearch = true;
+   fieldFetchers.put("name", this::fetchAndSetService);
  }

  @Override
@@ -125,7 +127,8 @@ public class DatabaseRepository extends EntityRepository<Database> {
        (DatabaseSchemaRepository) Entity.getEntityRepository(DATABASE_SCHEMA);
    List<DatabaseSchema> schemas =
        repository.listAllForCSV(
-           repository.getFields("owners,tags,domain"), database.getFullyQualifiedName());
+           repository.getFields("owners,tags,domain,extension"), database.getFullyQualifiedName());
    schemas.sort(Comparator.comparing(EntityInterface::getFullyQualifiedName));
    return new DatabaseCsv(database, user).exportCsv(schemas);
  }
@@ -224,6 +227,17 @@ public class DatabaseRepository extends EntityRepository<Database> {
    return database;
  }

+ private void fetchAndSetService(List<Database> entities, Fields fields) {
+   if (entities == null || entities.isEmpty() || (!fields.contains("name"))) {
+     return;
+   }
+   EntityReference service = getContainer(entities.get(0).getId());
+   for (Database database : entities) {
+     database.setService(service);
+   }
+ }
+
  public class DatabaseUpdater extends EntityUpdater {
    public DatabaseUpdater(Database original, Database updated, Operation operation) {
      super(original, updated, operation);
@@ -282,7 +296,8 @@ public class DatabaseRepository extends EntityRepository<Database> {
            .withTags(tagLabels)
            .withRetentionPeriod(csvRecord.get(7))
            .withSourceUrl(csvRecord.get(8))
-           .withDomain(getEntityReference(printer, csvRecord, 9, Entity.DOMAIN));
+           .withDomain(getEntityReference(printer, csvRecord, 9, Entity.DOMAIN))
+           .withExtension(getExtension(printer, csvRecord, 10));
      if (processRecord) {
        createEntity(printer, csvRecord, schema);
      }
@@ -306,6 +321,7 @@ public class DatabaseRepository extends EntityRepository<Database> {
            ? ""
            : entity.getDomain().getFullyQualifiedName();
      addField(recordList, domain);
+     addExtension(recordList, entity.getExtension());
      addRecord(csvFile, recordList);
    }
  }
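
For readers skimming the diff: the addExtension/getExtension calls above write and read a flattened representation of an entity's custom properties. The sketch below only illustrates that flattening; the class name, helper name, and map shape are assumptions for illustration, not the actual CsvUtil code, which also handles quoting, entity references, enums, and table-type values described in the documentation JSON further down.

// Hedged sketch: approximates how custom property values could be flattened
// into the CSV "extension" column; property names and values are invented.
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

class ExtensionCsvSketch {
  static String flattenExtension(Map<String, Object> extension) {
    if (extension == null || extension.isEmpty()) {
      return "";
    }
    // Join each custom property as "name:value", with properties separated by ';'
    return extension.entrySet().stream()
        .map(e -> e.getKey() + ":" + e.getValue())
        .collect(Collectors.joining(";"));
  }

  public static void main(String[] args) {
    Map<String, Object> extension = new LinkedHashMap<>();
    extension.put("integerCp", 7777);
    extension.put("stringCp", "sample string content");
    // Prints: integerCp:7777;stringCp:sample string content
    System.out.println(flattenExtension(extension));
  }
}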

View File

@@ -14,6 +14,7 @@
package org.openmetadata.service.jdbi3;
import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty;
+import static org.openmetadata.csv.CsvUtil.addExtension;
import static org.openmetadata.csv.CsvUtil.addField;
import static org.openmetadata.csv.CsvUtil.addGlossaryTerms;
import static org.openmetadata.csv.CsvUtil.addOwners;
@@ -196,9 +197,11 @@ public class DatabaseSchemaRepository extends EntityRepository<DatabaseSchema> {
  public String exportToCsv(String name, String user) throws IOException {
    DatabaseSchema schema = getByName(null, name, Fields.EMPTY_FIELDS); // Validate database schema
    TableRepository repository = (TableRepository) Entity.getEntityRepository(TABLE);
    List<Table> tables =
        repository.listAllForCSV(
-           repository.getFields("owners,tags,domain"), schema.getFullyQualifiedName());
+           repository.getFields("owners,tags,domain,extension"), schema.getFullyQualifiedName());
    tables.sort(Comparator.comparing(EntityInterface::getFullyQualifiedName));
    return new DatabaseSchemaCsv(schema, user).exportCsv(tables);
  }
@@ -315,7 +318,8 @@ public class DatabaseSchemaRepository extends EntityRepository<DatabaseSchema> {
            .withRetentionPeriod(csvRecord.get(7))
            .withSourceUrl(csvRecord.get(8))
            .withColumns(nullOrEmpty(table.getColumns()) ? new ArrayList<>() : table.getColumns())
-           .withDomain(getEntityReference(printer, csvRecord, 9, Entity.DOMAIN));
+           .withDomain(getEntityReference(printer, csvRecord, 9, Entity.DOMAIN))
+           .withExtension(getExtension(printer, csvRecord, 10));
      if (processRecord) {
        createEntity(printer, csvRecord, table);
@@ -340,6 +344,7 @@ public class DatabaseSchemaRepository extends EntityRepository<DatabaseSchema> {
            ? ""
            : entity.getDomain().getFullyQualifiedName();
      addField(recordList, domain);
+     addExtension(recordList, entity.getExtension());
      addRecord(csvFile, recordList);
    }
  }
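
To make the new column concrete: based on the csvRecord indices above (7 retentionPeriod, 8 sourceUrl, 9 domain, 10 extension), a table row in a schema-level export/import now carries eleven fields, with custom properties last. An invented example row (all names and values are illustrative, not from the commit):

orders,Orders,Fact table for orders,user:jane,,,Tier.Tier1,P30D,http://example.com/orders,Finance,"dateCp:18-09-2024;integerCp:42"

The extension cell is quoted here because it contains `;`, as the documentation further down requires.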

View File

@@ -13,6 +13,7 @@
package org.openmetadata.service.jdbi3;
+import static org.openmetadata.csv.CsvUtil.addExtension;
import static org.openmetadata.csv.CsvUtil.addField;
import static org.openmetadata.csv.CsvUtil.addGlossaryTerms;
import static org.openmetadata.csv.CsvUtil.addOwners;
@@ -68,7 +69,9 @@ public class DatabaseServiceRepository
    DatabaseRepository repository = (DatabaseRepository) Entity.getEntityRepository(DATABASE);
    List<Database> databases =
        repository.listAllForCSV(
-           repository.getFields("owners,tags,domain"), databaseService.getFullyQualifiedName());
+           repository.getFields("name,owners,tags,domain,extension"),
+           databaseService.getFullyQualifiedName());
    databases.sort(Comparator.comparing(EntityInterface::getFullyQualifiedName));
    return new DatabaseServiceCsv(databaseService, user).exportCsv(databases);
  }
@@ -122,7 +125,8 @@ public class DatabaseServiceRepository
            .withDescription(csvRecord.get(2))
            .withOwners(getOwners(printer, csvRecord, 3))
            .withTags(tagLabels)
-           .withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN));
+           .withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN))
+           .withExtension(getExtension(printer, csvRecord, 8));
    if (processRecord) {
      createEntity(printer, csvRecord, database);
@@ -145,6 +149,7 @@ public class DatabaseServiceRepository
            ? ""
            : entity.getDomain().getFullyQualifiedName();
    addField(recordList, domain);
+   addExtension(recordList, entity.getExtension());
    addRecord(csvFile, recordList);
  }
}
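
Similarly, at the service level a database row now has nine columns, with the indices inferred from the code above (2 description, 3 owners, 4 tags, 7 domain, 8 extension). An invented example row, for illustration only:

sales_db,Sales DB,Primary sales database,user:jane,,,Tier.Tier1,Marketing,enumSingleSelectCp:singleVal1

Note that the service-level export now also requests the name field, which lines up with the fetchAndSetService fetcher registered in DatabaseRepository above, so each listed database appears to carry its service reference during export.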

View File

@@ -84,6 +84,23 @@
      "examples": [
        "Marketing", "Sales"
      ]
+    },
+    {
+      "name": "extension",
+      "required": false,
"description": "Custom property values added to the glossary term. Each field value (property and its value) is separated by `;` and internal values can be separated by `|`. For `entityReferenceList` type property, pass `type1:fqn1|type2:fqn2`. For single `entityReference` type property, pass `type:fqn`. Similarly, for `enumMultiSelect`, pass values separated by `|`, and for `enumSingleSelect`, pass a single value along with the property name. For `timeInterval` property type, pass the `startTime:endTime` to the property name. If the field value itself contains delimiter values like `,` and `;` or newline they need to be quoted, and the quotation needs to be further escaped. In general, if passing multiple field values separated by `;`, the extension column value needs to be quoted.",
"examples": [
"`customAttribute1:value1;customAttribute2:value2`",
"`\"dateCp:18-09-2024;dateTimeCp:18-09-2024 01:09:34;durationCp:PT5H30M10S;emailCp:admin@open-metadata.org\"`",
"`entRefListCp:searchIndex:elasticsearch_sample.table_search_index|databaseSchema:Glue.default.information_schema|databaseSchema:sample_data.ecommerce_db.shopify|database:Glue.default|`",
"`\"entRefCp:user:\"\"aaron.singh2\"\"\"`",
"`\"enumMultiSelectCp:val3|val2|val1|val4|val5;enumSingleSelectCp:singleVal1\"`",
"`\"timeCp:10:08:45;timeIntervalCp:1726142300000:17261420000;timeStampCp:1726142400000\"`",
"`\"integerCp:7777;numberCp:123456\"`",
"`\"\"\"queryCp:select col,row from table where id ='30';\"\";stringcp:sample string content\"`",
"`markdownCp:# Sample Markdown Text`",
"\"\"\"tableCp:row_1_col1_Value,row_1_col2_Value,row_1_col3_Value\"\"\""
]
} }
] ]
} }
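
As a rough companion to the format documented above, the sketch below (an assumption for illustration, not OpenMetadata's actual parser) splits a plain, unquoted extension cell into name/value pairs; it deliberately ignores the quoting and escaping rules the description covers.

// Hedged sketch: naive parsing of an unquoted extension cell such as
// "customAttribute1:value1;customAttribute2:value2". Real cells may contain
// quoted ';' and ':' characters, which this sketch does not handle.
import java.util.LinkedHashMap;
import java.util.Map;

class ExtensionCellSketch {
  static Map<String, String> parseExtension(String cell) {
    Map<String, String> values = new LinkedHashMap<>();
    if (cell == null || cell.isBlank()) {
      return values;
    }
    for (String pair : cell.split(";")) {
      int idx = pair.indexOf(':'); // property name ends at the first ':'
      values.put(pair.substring(0, idx), pair.substring(idx + 1));
    }
    return values;
  }

  public static void main(String[] args) {
    // Prints: {customAttribute1=value1, customAttribute2=value2}
    System.out.println(parseExtension("customAttribute1:value1;customAttribute2:value2"));
  }
}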

View File

@@ -84,6 +84,23 @@
      "examples": [
        "Marketing", "Sales"
      ]
+    },
+    {
+      "name": "extension",
+      "required": false,
"description": "Custom property values added to the glossary term. Each field value (property and its value) is separated by `;` and internal values can be separated by `|`. For `entityReferenceList` type property, pass `type1:fqn1|type2:fqn2`. For single `entityReference` type property, pass `type:fqn`. Similarly, for `enumMultiSelect`, pass values separated by `|`, and for `enumSingleSelect`, pass a single value along with the property name. For `timeInterval` property type, pass the `startTime:endTime` to the property name. If the field value itself contains delimiter values like `,` and `;` or newline they need to be quoted, and the quotation needs to be further escaped. In general, if passing multiple field values separated by `;`, the extension column value needs to be quoted.",
"examples": [
"`customAttribute1:value1;customAttribute2:value2`",
"`\"dateCp:18-09-2024;dateTimeCp:18-09-2024 01:09:34;durationCp:PT5H30M10S;emailCp:admin@open-metadata.org\"`",
"`entRefListCp:searchIndex:elasticsearch_sample.table_search_index|databaseSchema:Glue.default.information_schema|databaseSchema:sample_data.ecommerce_db.shopify|database:Glue.default|`",
"`\"entRefCp:user:\"\"aaron.singh2\"\"\"`",
"`\"enumMultiSelectCp:val3|val2|val1|val4|val5;enumSingleSelectCp:singleVal1\"`",
"`\"timeCp:10:08:45;timeIntervalCp:1726142300000:17261420000;timeStampCp:1726142400000\"`",
"`\"integerCp:7777;numberCp:123456\"`",
"`\"\"\"queryCp:select col,row from table where id ='30';\"\";stringcp:sample string content\"`",
"`markdownCp:# Sample Markdown Text`",
"\"\"\"tableCp:row_1_col1_Value,row_1_col2_Value,row_1_col3_Value\"\"\""
]
} }
] ]
} }

View File

@@ -68,6 +68,23 @@
      "examples": [
        "Marketing", "Sales"
      ]
+    },
+    {
+      "name": "extension",
+      "required": false,
"description": "Custom property values added to the glossary term. Each field value (property and its value) is separated by `;` and internal values can be separated by `|`. For `entityReferenceList` type property, pass `type1:fqn1|type2:fqn2`. For single `entityReference` type property, pass `type:fqn`. Similarly, for `enumMultiSelect`, pass values separated by `|`, and for `enumSingleSelect`, pass a single value along with the property name. For `timeInterval` property type, pass the `startTime:endTime` to the property name. If the field value itself contains delimiter values like `,` and `;` or newline they need to be quoted, and the quotation needs to be further escaped. In general, if passing multiple field values separated by `;`, the extension column value needs to be quoted.",
"examples": [
"`customAttribute1:value1;customAttribute2:value2`",
"`\"dateCp:18-09-2024;dateTimeCp:18-09-2024 01:09:34;durationCp:PT5H30M10S;emailCp:admin@open-metadata.org\"`",
"`entRefListCp:searchIndex:elasticsearch_sample.table_search_index|databaseSchema:Glue.default.information_schema|databaseSchema:sample_data.ecommerce_db.shopify|database:Glue.default|`",
"`\"entRefCp:user:\"\"aaron.singh2\"\"\"`",
"`\"enumMultiSelectCp:val3|val2|val1|val4|val5;enumSingleSelectCp:singleVal1\"`",
"`\"timeCp:10:08:45;timeIntervalCp:1726142300000:17261420000;timeStampCp:1726142400000\"`",
"`\"integerCp:7777;numberCp:123456\"`",
"`\"\"\"queryCp:select col,row from table where id ='30';\"\";stringcp:sample string content\"`",
"`markdownCp:# Sample Markdown Text`",
"\"\"\"tableCp:row_1_col1_Value,row_1_col2_Value,row_1_col3_Value\"\"\""
]
} }
] ]
} }

View File

@@ -108,7 +108,8 @@
        "`\"timeCp:10:08:45;timeIntervalCp:1726142300000:17261420000;timeStampCp:1726142400000\"`",
        "`\"integerCp:7777;numberCp:123456\"`",
        "`\"\"\"queryCp:select col,row from table where id ='30';\"\";stringcp:sample string content\"`",
-       "`markdownCp:# Sample Markdown Text`"
+       "`markdownCp:# Sample Markdown Text`",
+       "\"\"\"tableCp:row_1_col1_Value,row_1_col2_Value,row_1_col3_Value\"\"\""
      ]
    }
  ]

View File

@@ -121,7 +121,7 @@ public class DatabaseResourceTest extends EntityResourceTest<Database, CreateDat
    // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
    // Update databaseSchema with invalid tags field
    String resultsHeader = recordToString(EntityCsv.getResultHeaders(DatabaseCsv.HEADERS));
-   String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,,";
+   String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,,,";
    String csv = createCsv(DatabaseCsv.HEADERS, listOf(record), null);
    CsvImportResult result = importCsv(databaseName, csv, false);
    assertSummary(result, ApiStatus.PARTIAL_SUCCESS, 2, 1, 1);
@@ -132,7 +132,7 @@ public class DatabaseResourceTest extends EntityResourceTest<Database, CreateDat
    assertRows(result, expectedRows);

    // invalid tag it will give error.
-   record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,,,";
+   record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,,,,";
    csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null);
    result = importCsv(databaseName, csv, false);
    assertSummary(result, ApiStatus.PARTIAL_SUCCESS, 2, 1, 1);
@@ -144,7 +144,7 @@ public class DatabaseResourceTest extends EntityResourceTest<Database, CreateDat
    // databaseSchema will be created if it does not exist
    String schemaFqn = FullyQualifiedName.add(database.getFullyQualifiedName(), "non-existing");
-   record = "non-existing,dsp1,dsc1,,,,,,,";
+   record = "non-existing,dsp1,dsc1,,,,,,,,";
    csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null);
    result = importCsv(databaseName, csv, false);
    assertSummary(result, ApiStatus.SUCCESS, 2, 2, 0);
@@ -168,7 +168,7 @@ public class DatabaseResourceTest extends EntityResourceTest<Database, CreateDat
    // Update terms with change in description
    String record =
        String.format(
-           "s1,dsp1,new-dsc1,user:%s,,,Tier.Tier1,P23DT23H,http://test.com,%s",
+           "s1,dsp1,new-dsc1,user:%s,,,Tier.Tier1,P23DT23H,http://test.com,%s,",
            user1, escapeCsv(DOMAIN.getFullyQualifiedName()));
    // Update created entity with changes

View File

@@ -120,7 +120,7 @@ public class DatabaseSchemaResourceTest
    // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
    // Create table with invalid tags field
    String resultsHeader = recordToString(EntityCsv.getResultHeaders(DatabaseSchemaCsv.HEADERS));
-   String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,,";
+   String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,,,";
    String csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null);
    CsvImportResult result = importCsv(schemaName, csv, false);
    assertSummary(result, ApiStatus.PARTIAL_SUCCESS, 2, 1, 1);
@@ -131,7 +131,7 @@ public class DatabaseSchemaResourceTest
    assertRows(result, expectedRows);

    // Tag will cause failure
-   record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,,,";
+   record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,,,,";
    csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null);
    result = importCsv(schemaName, csv, false);
    assertSummary(result, ApiStatus.PARTIAL_SUCCESS, 2, 1, 1);
@@ -142,7 +142,7 @@ public class DatabaseSchemaResourceTest
    assertRows(result, expectedRows);

    // non-existing table will cause
-   record = "non-existing,dsp1,dsc1,,,,,,,";
+   record = "non-existing,dsp1,dsc1,,,,,,,,";
    String tableFqn = FullyQualifiedName.add(schema.getFullyQualifiedName(), "non-existing");
    csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null);
    result = importCsv(schemaName, csv, false);
@@ -167,7 +167,7 @@ public class DatabaseSchemaResourceTest
    List<String> updateRecords =
        listOf(
            String.format(
-               "s1,dsp1,new-dsc1,user:%s,,,Tier.Tier1,P23DT23H,http://test.com,%s",
+               "s1,dsp1,new-dsc1,user:%s,,,Tier.Tier1,P23DT23H,http://test.com,%s,",
                user1, escapeCsv(DOMAIN.getFullyQualifiedName())));
    // Update created entity with changes

View File

@@ -15,10 +15,16 @@ package org.openmetadata.service.resources.services;
import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
import static javax.ws.rs.core.Response.Status.OK;
+import static org.apache.commons.lang.StringEscapeUtils.escapeCsv;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.openmetadata.common.utils.CommonUtil.listOf;
+import static org.openmetadata.csv.CsvUtil.recordToString;
+import static org.openmetadata.csv.EntityCsv.entityNotFound;
+import static org.openmetadata.csv.EntityCsvTest.*;
+import static org.openmetadata.csv.EntityCsvTest.assertRows;
import static org.openmetadata.service.exception.CatalogExceptionMessage.invalidEnumValue;
import static org.openmetadata.service.util.EntityUtil.fieldAdded;
import static org.openmetadata.service.util.EntityUtil.fieldUpdated;
@@ -35,14 +41,18 @@ import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.ws.rs.client.WebTarget;
+import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.client.HttpResponseException;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
+import org.openmetadata.csv.EntityCsv;
+import org.openmetadata.schema.api.data.CreateDatabase;
import org.openmetadata.schema.api.services.CreateDatabaseService;
import org.openmetadata.schema.api.services.CreateDatabaseService.DatabaseServiceType;
import org.openmetadata.schema.api.services.DatabaseConnection;
import org.openmetadata.schema.api.services.ingestionPipelines.CreateIngestionPipeline;
+import org.openmetadata.schema.entity.data.Database;
import org.openmetadata.schema.entity.services.DatabaseService;
import org.openmetadata.schema.entity.services.connections.TestConnectionResult;
import org.openmetadata.schema.entity.services.connections.TestConnectionResultStatus;
@@ -57,13 +67,18 @@ import org.openmetadata.schema.services.connections.database.MysqlConnection;
import org.openmetadata.schema.services.connections.database.RedshiftConnection;
import org.openmetadata.schema.services.connections.database.SnowflakeConnection;
import org.openmetadata.schema.services.connections.database.common.basicAuth;
+import org.openmetadata.schema.type.ApiStatus;
import org.openmetadata.schema.type.ChangeDescription;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.Schedule;
+import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.service.Entity;
+import org.openmetadata.service.jdbi3.DatabaseServiceRepository.DatabaseServiceCsv;
+import org.openmetadata.service.resources.databases.DatabaseResourceTest;
import org.openmetadata.service.resources.services.database.DatabaseServiceResource.DatabaseServiceList;
import org.openmetadata.service.resources.services.ingestionpipelines.IngestionPipelineResourceTest;
import org.openmetadata.service.secrets.masker.PasswordEntityMasker;
+import org.openmetadata.service.util.FullyQualifiedName;
import org.openmetadata.service.util.JsonUtils;
import org.openmetadata.service.util.TestUtils;
@@ -321,6 +336,72 @@ public class DatabaseServiceResourceTest
        invalidEnumValue(Include.class));
  }

+ @Test
+ @SneakyThrows
+ void testImportInvalidCsv() {
+   DatabaseService service = createEntity(createRequest("invalidCsv"), ADMIN_AUTH_HEADERS);
+   String serviceName = service.getFullyQualifiedName();
+   DatabaseResourceTest databaseTest = new DatabaseResourceTest();
+   CreateDatabase createDatabase = databaseTest.createRequest("s1").withService(serviceName);
+   databaseTest.createEntity(createDatabase, ADMIN_AUTH_HEADERS);
+
+   // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, domain, extension
+   // Update database with invalid tags field
+   String resultsHeader = recordToString(EntityCsv.getResultHeaders(DatabaseServiceCsv.HEADERS));
+   String record = "d1,dsp1,dsc1,,Tag.invalidTag,,,,";
+   String csv = createCsv(DatabaseServiceCsv.HEADERS, listOf(record), null);
+   CsvImportResult result = importCsv(serviceName, csv, false);
+   assertSummary(result, ApiStatus.PARTIAL_SUCCESS, 2, 1, 1);
+   String[] expectedRows =
+       new String[] {
+         resultsHeader, getFailedRecord(record, entityNotFound(4, "tag", "Tag.invalidTag"))
+       };
+   assertRows(result, expectedRows);
+
+   // invalid tag it will give error.
+   record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,,";
+   csv = createCsv(DatabaseServiceCsv.HEADERS, listOf(record), null);
+   result = importCsv(serviceName, csv, false);
+   assertSummary(result, ApiStatus.PARTIAL_SUCCESS, 2, 1, 1);
+   expectedRows =
+       new String[] {
+         resultsHeader, getFailedRecord(record, entityNotFound(4, "tag", "Tag.invalidTag"))
+       };
+   assertRows(result, expectedRows);
+
+   // database will be created if it does not exist
+   String databaseFqn = FullyQualifiedName.add(serviceName, "non-existing");
+   record = "non-existing,dsp1,dsc1,,,,,,";
+   csv = createCsv(DatabaseServiceCsv.HEADERS, listOf(record), null);
+   result = importCsv(serviceName, csv, false);
+   assertSummary(result, ApiStatus.SUCCESS, 2, 2, 0);
+   expectedRows = new String[] {resultsHeader, getSuccessRecord(record, "Entity created")};
+   assertRows(result, expectedRows);
+   Database createdDatabase = databaseTest.getEntityByName(databaseFqn, "id", ADMIN_AUTH_HEADERS);
+   assertEquals(databaseFqn, createdDatabase.getFullyQualifiedName());
+ }
+
+ @Test
+ void testImportExport() throws IOException {
+   String user1 = USER1.getName();
+   DatabaseService service = createEntity(createRequest("importExportTest"), ADMIN_AUTH_HEADERS);
+   DatabaseResourceTest databaseTest = new DatabaseResourceTest();
+   CreateDatabase createDatabase =
+       databaseTest.createRequest("d1").withService(service.getFullyQualifiedName());
+   databaseTest.createEntity(createDatabase, ADMIN_AUTH_HEADERS);
+
+   // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, domain, extension
+   // Update terms with change in description
+   String record =
+       String.format(
+           "d1,dsp1,new-dsc1,user:%s,,,Tier.Tier1,%s,",
+           user1, escapeCsv(DOMAIN.getFullyQualifiedName()));
+
+   // Update created entity with changes
+   importCsvAndValidate(
+       service.getFullyQualifiedName(), DatabaseServiceCsv.HEADERS, null, listOf(record));
+ }
+
  public DatabaseService putTestConnectionResult(
      UUID serviceId, TestConnectionResult testConnectionResult, Map<String, String> authHeaders)
      throws HttpResponseException {

View File

@@ -243,6 +243,7 @@ export const PropertyValue: FC<PropertyValueProps> = ({
    return (
      <InlineEdit
+       className="custom-property-inline-edit-container"
        isLoading={isLoading}
        saveButtonProps={{
          disabled: isLoading,
@@ -286,6 +287,7 @@ export const PropertyValue: FC<PropertyValueProps> = ({
    return (
      <InlineEdit
+       className="custom-property-inline-edit-container"
        isLoading={isLoading}
        saveButtonProps={{
          disabled: isLoading,
@@ -329,6 +331,7 @@ export const PropertyValue: FC<PropertyValueProps> = ({
    return (
      <InlineEdit
+       className="custom-property-inline-edit-container"
        isLoading={isLoading}
        saveButtonProps={{
          disabled: isLoading,
@@ -368,6 +371,7 @@ export const PropertyValue: FC<PropertyValueProps> = ({
    return (
      <InlineEdit
+       className="custom-property-inline-edit-container"
        isLoading={isLoading}
        saveButtonProps={{
          disabled: isLoading,
@@ -413,6 +417,7 @@ export const PropertyValue: FC<PropertyValueProps> = ({
    return (
      <InlineEdit
+       className="custom-property-inline-edit-container"
        isLoading={isLoading}
        saveButtonProps={{
          disabled: isLoading,
@@ -463,6 +468,7 @@ export const PropertyValue: FC<PropertyValueProps> = ({
    return (
      <InlineEdit
+       className="custom-property-inline-edit-container"
        isLoading={isLoading}
        saveButtonProps={{
          disabled: isLoading,
@@ -536,6 +542,7 @@ export const PropertyValue: FC<PropertyValueProps> = ({
    return (
      <InlineEdit
+       className="custom-property-inline-edit-container"
        isLoading={isLoading}
        saveButtonProps={{
          disabled: isLoading,
@@ -572,7 +579,7 @@ export const PropertyValue: FC<PropertyValueProps> = ({
    return (
      <InlineEdit
-       className="sql-query-custom-property"
+       className="custom-property-inline-edit-container sql-query-custom-property"
        isLoading={isLoading}
        saveButtonProps={{
          disabled: isLoading,
@@ -645,6 +652,7 @@ export const PropertyValue: FC<PropertyValueProps> = ({
    return (
      <InlineEdit
+       className="custom-property-inline-edit-container"
        isLoading={isLoading}
        saveButtonProps={{
          disabled: isLoading,

View File

@ -56,7 +56,6 @@
.ant-space-item:first-child { .ant-space-item:first-child {
width: 100%; width: 100%;
} }
overflow-x: scroll;
} }
.custom-property-card { .custom-property-card {
.ant-card-body { .ant-card-body {
@ -93,3 +92,7 @@
font-size: 13px; font-size: 13px;
} }
} }
.custom-property-inline-edit-container {
overflow-x: scroll;
}