[Feature] Import/Export For Table, DatabaseSchema, Databases, Service (#15816)

* - Add Import Export Separation for GlossaryTerms

* - Fixed Table Resource Test

* - Review Comment #2

* - GlossaryTestFix, Glossary does not allow Tier Tags

* - Database Schema Tests Fix

* - Create Database, DatabaseSchema, DatabaseService import entity if not exists

* - Fix Test for Database DatabaseSchema, Table
This commit is contained in:
Mohit Yadav 2024-04-07 02:21:56 +05:30 committed by GitHub
parent 61bc8568f8
commit 5a88d15228
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
16 changed files with 615 additions and 81 deletions

View File

@ -132,7 +132,45 @@ public final class CsvUtil {
csvRecord.add(
nullOrEmpty(tags)
? null
: tags.stream().map(TagLabel::getTagFQN).collect(Collectors.joining(FIELD_SEPARATOR)));
: tags.stream()
.filter(
tagLabel ->
tagLabel.getSource().equals(TagLabel.TagSource.CLASSIFICATION)
&& !tagLabel.getTagFQN().split("\\.")[0].equals("Tier")
&& !tagLabel.getLabelType().equals(TagLabel.LabelType.DERIVED))
.map(TagLabel::getTagFQN)
.collect(Collectors.joining(FIELD_SEPARATOR)));
return csvRecord;
}
public static List<String> addGlossaryTerms(List<String> csvRecord, List<TagLabel> tags) {
  // Appends a single CSV field containing the ';'-joined FQNs of all glossary-term
  // labels (excluding any whose FQN root is "Tier"). A null/empty tag list produces
  // a null field so the CSV cell stays blank. Returns the same list for chaining.
  String glossaryField;
  if (nullOrEmpty(tags)) {
    glossaryField = null;
  } else {
    glossaryField =
        tags.stream()
            .filter(label -> label.getSource().equals(TagLabel.TagSource.GLOSSARY))
            .filter(label -> !label.getTagFQN().split("\\.")[0].equals("Tier"))
            .map(TagLabel::getTagFQN)
            .collect(Collectors.joining(FIELD_SEPARATOR));
  }
  csvRecord.add(glossaryField);
  return csvRecord;
}
public static List<String> addTagTiers(List<String> csvRecord, List<TagLabel> tags) {
  // Appends a single CSV field containing only the "Tier" classification tag FQNs,
  // ';'-joined. A null/empty tag list produces a null field (blank CSV cell).
  // Returns the same list for chaining.
  String tierField = null;
  if (!nullOrEmpty(tags)) {
    tierField =
        tags.stream()
            .filter(label -> label.getSource().equals(TagLabel.TagSource.CLASSIFICATION))
            .filter(label -> label.getTagFQN().split("\\.")[0].equals("Tier"))
            .map(TagLabel::getTagFQN)
            .collect(Collectors.joining(FIELD_SEPARATOR));
  }
  csvRecord.add(tierField);
  return csvRecord;
}

View File

@ -34,6 +34,7 @@ import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVFormat.Builder;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang3.tuple.Pair;
import org.jdbi.v3.sqlobject.transaction.Transaction;
import org.openmetadata.common.utils.CommonUtil;
import org.openmetadata.schema.EntityInterface;
@ -275,20 +276,26 @@ public abstract class EntityCsv<T extends EntityInterface> {
}
protected final List<TagLabel> getTagLabels(
CSVPrinter printer, CSVRecord csvRecord, int fieldNumber) throws IOException {
CSVPrinter printer,
CSVRecord csvRecord,
List<Pair<Integer, TagSource>> fieldNumbersWithSource)
throws IOException {
if (!processRecord) {
return null;
}
List<EntityReference> refs = getEntityReferences(printer, csvRecord, fieldNumber, Entity.TAG);
if (!processRecord || nullOrEmpty(refs)) {
return null;
}
List<TagLabel> tagLabels = new ArrayList<>();
for (EntityReference ref : refs) {
tagLabels.add(
new TagLabel()
.withSource(TagSource.CLASSIFICATION)
.withTagFQN(ref.getFullyQualifiedName()));
for (Pair<Integer, TagSource> pair : fieldNumbersWithSource) {
int fieldNumbers = pair.getLeft();
TagSource source = pair.getRight();
List<EntityReference> refs =
source == TagSource.CLASSIFICATION
? getEntityReferences(printer, csvRecord, fieldNumbers, Entity.TAG)
: getEntityReferences(printer, csvRecord, fieldNumbers, Entity.GLOSSARY_TERM);
if (processRecord && !nullOrEmpty(refs)) {
for (EntityReference ref : refs) {
tagLabels.add(new TagLabel().withSource(source).withTagFQN(ref.getFullyQualifiedName()));
}
}
}
return tagLabels;
}
@ -391,6 +398,7 @@ public abstract class EntityCsv<T extends EntityInterface> {
responseStatus = response.getStatus();
} catch (Exception ex) {
importFailure(resultsPrinter, ex.getMessage(), csvRecord);
importResult.setStatus(ApiStatus.FAILURE);
return;
}
} else { // Dry run don't create the entity

View File

@ -14,8 +14,10 @@
package org.openmetadata.service.jdbi3;
import static org.openmetadata.csv.CsvUtil.addField;
import static org.openmetadata.csv.CsvUtil.addGlossaryTerms;
import static org.openmetadata.csv.CsvUtil.addOwner;
import static org.openmetadata.csv.CsvUtil.addTagLabels;
import static org.openmetadata.csv.CsvUtil.addTagTiers;
import static org.openmetadata.service.Entity.DATABASE_SCHEMA;
import java.io.IOException;
@ -26,6 +28,7 @@ import java.util.UUID;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang3.tuple.Pair;
import org.jdbi.v3.sqlobject.transaction.Transaction;
import org.openmetadata.csv.EntityCsv;
import org.openmetadata.schema.EntityInterface;
@ -36,6 +39,7 @@ import org.openmetadata.schema.type.DatabaseProfilerConfig;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.Relationship;
import org.openmetadata.schema.type.TagLabel;
import org.openmetadata.schema.type.csv.CsvDocumentation;
import org.openmetadata.schema.type.csv.CsvFile;
import org.openmetadata.schema.type.csv.CsvHeader;
@ -116,7 +120,12 @@ public class DatabaseRepository extends EntityRepository<Database> {
@Override
public CsvImportResult importFromCsv(String name, String csv, boolean dryRun, String user)
throws IOException {
Database database = getByName(null, name, Fields.EMPTY_FIELDS); // Validate glossary name
Database database =
getByName(
null,
name,
getFields(
"service")); // Validate glossary name, and get service needed in case of create
DatabaseCsv databaseCsv = new DatabaseCsv(database, user);
return databaseCsv.importCsv(csv, dryRun);
}
@ -234,22 +243,33 @@ public class DatabaseRepository extends EntityRepository<Database> {
try {
schema = Entity.getEntityByName(DATABASE_SCHEMA, schemaFqn, "*", Include.NON_DELETED);
} catch (Exception ex) {
importFailure(printer, entityNotFound(0, DATABASE_SCHEMA, schemaFqn), csvRecord);
processRecord = false;
return;
LOG.warn("Database Schema not found: {}, it will be created with Import.", schemaFqn);
schema =
new DatabaseSchema()
.withDatabase(database.getEntityReference())
.withService(database.getService());
}
// Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
// Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, retentionPeriod,
// sourceUrl, domain
// Field 1,2,3,6,7 - database schema name, displayName, description
List<TagLabel> tagLabels =
getTagLabels(
printer,
csvRecord,
List.of(
Pair.of(4, TagLabel.TagSource.CLASSIFICATION),
Pair.of(5, TagLabel.TagSource.GLOSSARY),
Pair.of(6, TagLabel.TagSource.CLASSIFICATION)));
schema
.withName(csvRecord.get(0))
.withDisplayName(csvRecord.get(1))
.withDescription(csvRecord.get(2))
.withOwner(getOwner(printer, csvRecord, 3))
.withTags(getTagLabels(printer, csvRecord, 4))
.withRetentionPeriod(csvRecord.get(5))
.withSourceUrl(csvRecord.get(6))
.withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN));
.withTags(tagLabels)
.withRetentionPeriod(csvRecord.get(7))
.withSourceUrl(csvRecord.get(8))
.withDomain(getEntityReference(printer, csvRecord, 9, Entity.DOMAIN));
if (processRecord) {
createEntity(printer, csvRecord, schema);
}
@ -264,6 +284,8 @@ public class DatabaseRepository extends EntityRepository<Database> {
addField(recordList, entity.getDescription());
addOwner(recordList, entity.getOwner());
addTagLabels(recordList, entity.getTags());
addGlossaryTerms(recordList, entity.getTags());
addTagTiers(recordList, entity.getTags());
addField(recordList, entity.getRetentionPeriod());
addField(recordList, entity.getSourceUrl());
String domain =

View File

@ -14,8 +14,10 @@
package org.openmetadata.service.jdbi3;
import static org.openmetadata.csv.CsvUtil.addField;
import static org.openmetadata.csv.CsvUtil.addGlossaryTerms;
import static org.openmetadata.csv.CsvUtil.addOwner;
import static org.openmetadata.csv.CsvUtil.addTagLabels;
import static org.openmetadata.csv.CsvUtil.addTagTiers;
import static org.openmetadata.schema.type.Include.ALL;
import static org.openmetadata.service.Entity.DATABASE_SCHEMA;
import static org.openmetadata.service.Entity.TABLE;
@ -29,6 +31,7 @@ import java.util.UUID;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang3.tuple.Pair;
import org.jdbi.v3.sqlobject.transaction.Transaction;
import org.openmetadata.csv.EntityCsv;
import org.openmetadata.schema.EntityInterface;
@ -39,6 +42,7 @@ import org.openmetadata.schema.type.DatabaseSchemaProfilerConfig;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.Relationship;
import org.openmetadata.schema.type.TagLabel;
import org.openmetadata.schema.type.csv.CsvDocumentation;
import org.openmetadata.schema.type.csv.CsvFile;
import org.openmetadata.schema.type.csv.CsvHeader;
@ -188,7 +192,8 @@ public class DatabaseSchemaRepository extends EntityRepository<DatabaseSchema> {
@Override
public CsvImportResult importFromCsv(String name, String csv, boolean dryRun, String user)
throws IOException {
DatabaseSchema schema = getByName(null, name, Fields.EMPTY_FIELDS); // Validate database schema
DatabaseSchema schema =
getByName(null, name, getFields("database,service")); // Validate database schema
return new DatabaseSchemaCsv(schema, user).importCsv(csv, dryRun);
}
@ -266,21 +271,35 @@ public class DatabaseSchemaRepository extends EntityRepository<DatabaseSchema> {
try {
table = Entity.getEntityByName(TABLE, tableFqn, "*", Include.NON_DELETED);
} catch (Exception ex) {
importFailure(printer, entityNotFound(0, TABLE, tableFqn), csvRecord);
processRecord = false;
return;
LOG.warn("Table not found: {}, it will be created with Import.", tableFqn);
table =
new Table()
.withService(schema.getService())
.withDatabase(schema.getDatabase())
.withDatabaseSchema(schema.getEntityReference());
}
// Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
// Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, retentionPeriod,
// sourceUrl, domain
// Field 1,2,3,6,7 - database schema name, displayName, description
List<TagLabel> tagLabels =
getTagLabels(
printer,
csvRecord,
List.of(
Pair.of(4, TagLabel.TagSource.CLASSIFICATION),
Pair.of(5, TagLabel.TagSource.GLOSSARY),
Pair.of(6, TagLabel.TagSource.CLASSIFICATION)));
table
.withName(csvRecord.get(0))
.withDisplayName(csvRecord.get(1))
.withDescription(csvRecord.get(2))
.withOwner(getOwner(printer, csvRecord, 3))
.withTags(getTagLabels(printer, csvRecord, 4))
.withRetentionPeriod(csvRecord.get(5))
.withSourceUrl(csvRecord.get(6))
.withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN));
.withTags(tagLabels)
.withRetentionPeriod(csvRecord.get(7))
.withSourceUrl(csvRecord.get(8))
.withColumns(new ArrayList<>())
.withDomain(getEntityReference(printer, csvRecord, 9, Entity.DOMAIN));
if (processRecord) {
createEntity(printer, csvRecord, table);
@ -296,6 +315,8 @@ public class DatabaseSchemaRepository extends EntityRepository<DatabaseSchema> {
addField(recordList, entity.getDescription());
addOwner(recordList, entity.getOwner());
addTagLabels(recordList, entity.getTags());
addGlossaryTerms(recordList, entity.getTags());
addTagTiers(recordList, entity.getTags());
addField(recordList, entity.getRetentionPeriod());
addField(recordList, entity.getSourceUrl());
String domain =

View File

@ -13,12 +13,39 @@
package org.openmetadata.service.jdbi3;
import static org.openmetadata.csv.CsvUtil.addField;
import static org.openmetadata.csv.CsvUtil.addGlossaryTerms;
import static org.openmetadata.csv.CsvUtil.addOwner;
import static org.openmetadata.csv.CsvUtil.addTagLabels;
import static org.openmetadata.csv.CsvUtil.addTagTiers;
import static org.openmetadata.service.Entity.DATABASE;
import static org.openmetadata.service.Entity.DATABASE_SERVICE;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang3.tuple.Pair;
import org.openmetadata.csv.EntityCsv;
import org.openmetadata.schema.EntityInterface;
import org.openmetadata.schema.api.services.DatabaseConnection;
import org.openmetadata.schema.entity.data.Database;
import org.openmetadata.schema.entity.services.DatabaseService;
import org.openmetadata.schema.entity.services.ServiceType;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.TagLabel;
import org.openmetadata.schema.type.csv.CsvDocumentation;
import org.openmetadata.schema.type.csv.CsvFile;
import org.openmetadata.schema.type.csv.CsvHeader;
import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.service.Entity;
import org.openmetadata.service.exception.EntityNotFoundException;
import org.openmetadata.service.resources.services.database.DatabaseServiceResource;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.FullyQualifiedName;
@Slf4j
public class DatabaseServiceRepository
@ -33,4 +60,92 @@ public class DatabaseServiceRepository
ServiceType.DATABASE);
supportsSearch = true;
}
@Override
public String exportToCsv(String name, String user) throws IOException {
  // Validate that the database service exists before exporting.
  DatabaseService databaseService = getByName(null, name, EntityUtil.Fields.EMPTY_FIELDS);
  // Collect every non-deleted database under this service, sorted by FQN for a
  // deterministic CSV, and serialize them through DatabaseServiceCsv.
  DatabaseRepository databaseRepository =
      (DatabaseRepository) Entity.getEntityRepository(DATABASE);
  ListFilter serviceFilter = new ListFilter(Include.NON_DELETED).addQueryParam("service", name);
  List<Database> databases =
      databaseRepository.listAll(databaseRepository.getFields("owner,tags,domain"), serviceFilter);
  databases.sort(Comparator.comparing(EntityInterface::getFullyQualifiedName));
  return new DatabaseServiceCsv(databaseService, user).exportCsv(databases);
}
@Override
public CsvImportResult importFromCsv(String name, String csv, boolean dryRun, String user)
    throws IOException {
  // Validate database service
  DatabaseService databaseService =
      getByName(null, name, EntityUtil.Fields.EMPTY_FIELDS); // Validate database service name
  DatabaseServiceCsv databaseServiceCsv = new DatabaseServiceCsv(databaseService, user);
  return databaseServiceCsv.importCsv(csv, dryRun);
}
public static class DatabaseServiceCsv extends EntityCsv<Database> {
  public static final CsvDocumentation DOCUMENTATION = getCsvDocumentation(DATABASE_SERVICE);
  public static final List<CsvHeader> HEADERS = DOCUMENTATION.getHeaders();
  private final DatabaseService service;

  DatabaseServiceCsv(DatabaseService service, String user) {
    // Use the HEADERS constant instead of re-reading DOCUMENTATION.getHeaders();
    // keeps the declared constant meaningful and the two in sync by construction.
    super(DATABASE, HEADERS, user);
    this.service = service;
  }

  /**
   * Creates or updates one {@link Database} from a CSV record. If the database named in the
   * record does not exist under this service yet, a new one is created during import.
   *
   * CSV columns: 0=name, 1=displayName, 2=description, 3=owner, 4=tags, 5=glossaryTerms,
   * 6=tiers, 7=domain.
   */
  @Override
  protected void createEntity(CSVPrinter printer, List<CSVRecord> csvRecords) throws IOException {
    CSVRecord csvRecord = getNextRecord(printer, csvRecords);
    String databaseFqn =
        FullyQualifiedName.add(service.getFullyQualifiedName(), csvRecord.get(0));
    Database database;
    try {
      database = Entity.getEntityByName(DATABASE, databaseFqn, "*", Include.NON_DELETED);
    } catch (EntityNotFoundException ex) {
      LOG.warn("Database not found: {}, it will be created with Import.", databaseFqn);
      database = new Database().withService(service.getEntityReference());
    }
    // Tags (col 4) and tiers (col 6) are classification labels; glossary terms (col 5)
    // are glossary labels. getTagLabels resolves each column against its source.
    List<TagLabel> tagLabels =
        getTagLabels(
            printer,
            csvRecord,
            List.of(
                Pair.of(4, TagLabel.TagSource.CLASSIFICATION),
                Pair.of(5, TagLabel.TagSource.GLOSSARY),
                Pair.of(6, TagLabel.TagSource.CLASSIFICATION)));
    database
        .withName(csvRecord.get(0))
        .withDisplayName(csvRecord.get(1))
        .withDescription(csvRecord.get(2))
        .withOwner(getOwner(printer, csvRecord, 3))
        .withTags(tagLabels)
        .withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN));
    if (processRecord) {
      createEntity(printer, csvRecord, database);
    }
  }

  /** Serializes one database into a CSV row matching the header layout above. */
  @Override
  protected void addRecord(CsvFile csvFile, Database entity) {
    // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, domain
    List<String> recordList = new ArrayList<>();
    addField(recordList, entity.getName());
    addField(recordList, entity.getDisplayName());
    addField(recordList, entity.getDescription());
    addOwner(recordList, entity.getOwner());
    addTagLabels(recordList, entity.getTags());
    addGlossaryTerms(recordList, entity.getTags());
    addTagTiers(recordList, entity.getTags());
    // Inherited domains are not exported — only explicitly assigned domains round-trip.
    String domain =
        entity.getDomain() == null || Boolean.TRUE.equals(entity.getDomain().getInherited())
            ? ""
            : entity.getDomain().getFullyQualifiedName();
    addField(recordList, domain);
    addRecord(csvFile, recordList);
  }
}
}

View File

@ -38,6 +38,7 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.jdbi.v3.sqlobject.transaction.Transaction;
import org.openmetadata.csv.CsvUtil;
import org.openmetadata.csv.EntityCsv;
@ -50,6 +51,7 @@ import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.ProviderType;
import org.openmetadata.schema.type.Relationship;
import org.openmetadata.schema.type.TagLabel;
import org.openmetadata.schema.type.TagLabel.TagSource;
import org.openmetadata.schema.type.csv.CsvDocumentation;
import org.openmetadata.schema.type.csv.CsvFile;
@ -179,7 +181,9 @@ public class GlossaryRepository extends EntityRepository<Glossary> {
.withSynonyms(CsvUtil.fieldToStrings(csvRecord.get(4)))
.withRelatedTerms(getEntityReferences(printer, csvRecord, 5, GLOSSARY_TERM))
.withReferences(getTermReferences(printer, csvRecord))
.withTags(getTagLabels(printer, csvRecord, 7))
.withTags(
getTagLabels(
printer, csvRecord, List.of(Pair.of(7, TagLabel.TagSource.CLASSIFICATION))))
.withReviewers(getEntityReferences(printer, csvRecord, 8, Entity.USER))
.withOwner(getOwner(printer, csvRecord, 9))
.withStatus(getTermStatus(printer, csvRecord));

View File

@ -16,9 +16,12 @@ package org.openmetadata.service.jdbi3;
import static java.util.stream.Collectors.groupingBy;
import static org.openmetadata.common.utils.CommonUtil.listOf;
import static org.openmetadata.common.utils.CommonUtil.listOrEmpty;
import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty;
import static org.openmetadata.csv.CsvUtil.addField;
import static org.openmetadata.csv.CsvUtil.addGlossaryTerms;
import static org.openmetadata.csv.CsvUtil.addOwner;
import static org.openmetadata.csv.CsvUtil.addTagLabels;
import static org.openmetadata.csv.CsvUtil.addTagTiers;
import static org.openmetadata.schema.type.Include.ALL;
import static org.openmetadata.schema.type.Include.NON_DELETED;
import static org.openmetadata.service.Entity.DATABASE_SCHEMA;
@ -35,6 +38,7 @@ import static org.openmetadata.service.util.LambdaExceptionUtil.rethrowFunction;
import com.google.common.collect.Streams;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
@ -62,6 +66,7 @@ import org.openmetadata.schema.entity.feed.Suggestion;
import org.openmetadata.schema.tests.CustomMetric;
import org.openmetadata.schema.tests.TestSuite;
import org.openmetadata.schema.type.Column;
import org.openmetadata.schema.type.ColumnDataType;
import org.openmetadata.schema.type.ColumnJoin;
import org.openmetadata.schema.type.ColumnProfile;
import org.openmetadata.schema.type.ColumnProfilerConfig;
@ -774,7 +779,11 @@ public class TableRepository extends EntityRepository<Table> {
public CsvImportResult importFromCsv(String name, String csv, boolean dryRun, String user)
throws IOException {
// Validate table
Table table = getByName(null, name, new Fields(allowedFields, "owner,domain,tags,columns"));
Table table =
getByName(
null,
name,
new Fields(allowedFields, "owner,domain,tags,columns,database,service,databaseSchema"));
return new TableCsv(table, user).importCsv(csv, dryRun);
}
@ -1157,19 +1166,29 @@ public class TableRepository extends EntityRepository<Table> {
@Override
protected void createEntity(CSVPrinter printer, List<CSVRecord> csvRecords) throws IOException {
CSVRecord csvRecord = getNextRecord(printer, csvRecords);
// Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
// column.fullyQualifiedName, column.displayName, column.description, column.dataTypeDisplay,
// column.tags
// Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, retentionPeriod,
// sourceUrl, domain, column.fullyQualifiedName, column.displayName, column.description,
// column.dataTypeDisplay,
// column.tags, column.glossaryTerms
if (processRecord) {
// fields tags(4), glossaryTerms(5), tiers(6)
List<TagLabel> tagLabels =
getTagLabels(
printer,
csvRecord,
List.of(
Pair.of(4, TagLabel.TagSource.CLASSIFICATION),
Pair.of(5, TagLabel.TagSource.GLOSSARY),
Pair.of(6, TagLabel.TagSource.CLASSIFICATION)));
table
.withName(csvRecord.get(0))
.withDisplayName(csvRecord.get(1))
.withDescription(csvRecord.get(2))
.withOwner(getOwner(printer, csvRecord, 3))
.withTags(getTagLabels(printer, csvRecord, 4))
.withRetentionPeriod(csvRecord.get(5))
.withSourceUrl(csvRecord.get(6))
.withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN));
.withTags(tagLabels != null && tagLabels.isEmpty() ? null : tagLabels)
.withRetentionPeriod(csvRecord.get(7))
.withSourceUrl(csvRecord.get(8))
.withDomain(getEntityReference(printer, csvRecord, 9, Entity.DOMAIN));
ImportResult importResult = updateColumn(printer, csvRecord);
if (importResult.result().equals(IMPORT_FAILED)) {
importFailure(printer, importResult.details(), csvRecord);
@ -1202,16 +1221,71 @@ public class TableRepository extends EntityRepository<Table> {
if (!processRecord) {
return new ImportResult(IMPORT_SKIPPED, csvRecord, "");
}
String columnFqn = csvRecord.get(8);
String columnFqn = csvRecord.get(10);
Column column = findColumn(table.getColumns(), columnFqn);
boolean columnExists = column != null;
if (column == null) {
processRecord = false;
return new ImportResult(IMPORT_FAILED, csvRecord, columnNotFound(8, columnFqn));
// Create Column, if not found
column =
new Column()
.withName(getLocalColumnName(table.getFullyQualifiedName(), columnFqn))
.withFullyQualifiedName(
table.getFullyQualifiedName() + Entity.SEPARATOR + columnFqn);
}
column.withDisplayName(csvRecord.get(9));
column.withDescription(csvRecord.get(10));
column.withDataTypeDisplay(csvRecord.get(11));
column.withTags(getTagLabels(printer, csvRecord, 12));
column.withDisplayName(csvRecord.get(11));
column.withDescription(csvRecord.get(12));
column.withDataTypeDisplay(csvRecord.get(13));
column.withDataType(
nullOrEmpty(csvRecord.get(14)) ? null : ColumnDataType.fromValue(csvRecord.get(14)));
column.withArrayDataType(
nullOrEmpty(csvRecord.get(15)) ? null : ColumnDataType.fromValue(csvRecord.get(15)));
column.withDataLength(
nullOrEmpty(csvRecord.get(16)) ? null : Integer.parseInt(csvRecord.get(16)));
List<TagLabel> tagLabels =
getTagLabels(
printer,
csvRecord,
List.of(
Pair.of(17, TagLabel.TagSource.CLASSIFICATION),
Pair.of(18, TagLabel.TagSource.GLOSSARY)));
column.withTags(nullOrEmpty(tagLabels) ? null : tagLabels);
column.withOrdinalPosition(nullOrEmpty(table.getColumns()) ? 0 : table.getColumns().size());
// If Column Does not Exist add it to the table
if (!columnExists) {
String[] splitColumnName = FullyQualifiedName.split(columnFqn);
// Parent Column
if (splitColumnName.length == 1) {
List<Column> tableColumns =
table.getColumns() == null ? new ArrayList<>() : table.getColumns();
tableColumns.add(column);
table.withColumns(tableColumns);
} else {
String parentColumnFqn =
String.join(
Entity.SEPARATOR, Arrays.copyOf(splitColumnName, splitColumnName.length - 1));
Column parentColumn = findColumn(table.getColumns(), parentColumnFqn);
if (parentColumn == null) {
return new ImportResult(
IMPORT_FAILED,
csvRecord,
"Parent Column not found. Check the order of the columns in the CSV file.");
}
// Update Name And Ordinal position in the parent column
column.withName(splitColumnName[splitColumnName.length - 1]);
column.withOrdinalPosition(
nullOrEmpty(parentColumn.getChildren()) ? 0 : parentColumn.getChildren().size());
// Add this column to children of Parent
List<Column> children =
nullOrEmpty(parentColumn.getChildren())
? new ArrayList<>()
: parentColumn.getChildren();
children.add(column);
parentColumn.withChildren(children);
}
}
return new ImportResult(IMPORT_SUCCESS, csvRecord, ENTITY_UPDATED);
}
@ -1226,6 +1300,8 @@ public class TableRepository extends EntityRepository<Table> {
addField(recordList, entity.getDescription());
addOwner(recordList, entity.getOwner());
addTagLabels(recordList, entity.getTags());
addGlossaryTerms(recordList, entity.getTags());
addTagTiers(recordList, entity.getTags());
addField(recordList, entity.getRetentionPeriod());
addField(recordList, entity.getSourceUrl());
String domain =
@ -1243,7 +1319,7 @@ public class TableRepository extends EntityRepository<Table> {
private void addRecord(
CsvFile csvFile, List<String> recordList, Column column, boolean emptyTableDetails) {
if (emptyTableDetails) {
for (int i = 0; i < 8; i++) {
for (int i = 0; i < 10; i++) {
addField(recordList, (String) null); // Add empty fields for table information
}
}
@ -1253,7 +1329,14 @@ public class TableRepository extends EntityRepository<Table> {
addField(recordList, column.getDisplayName());
addField(recordList, column.getDescription());
addField(recordList, column.getDataTypeDisplay());
addField(recordList, column.getDataType() == null ? null : column.getDataType().value());
addField(
recordList, column.getArrayDataType() == null ? null : column.getArrayDataType().value());
addField(
recordList,
column.getDataLength() == null ? null : String.valueOf(column.getDataLength()));
addTagLabels(recordList, column.getTags());
addGlossaryTerms(recordList, column.getTags());
addRecord(csvFile, recordList);
listOrEmpty(column.getChildren())
.forEach(c -> addRecord(csvFile, new ArrayList<>(), c, true));

View File

@ -22,6 +22,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.parameters.RequestBody;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.io.IOException;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
@ -55,6 +56,7 @@ import org.openmetadata.schema.entity.services.connections.TestConnectionResult;
import org.openmetadata.schema.type.EntityHistory;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.MetadataOperation;
import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.service.Entity;
import org.openmetadata.service.jdbi3.DatabaseServiceRepository;
import org.openmetadata.service.resources.Collection;
@ -408,6 +410,64 @@ public class DatabaseServiceResource
return patchInternal(uriInfo, securityContext, id, patch);
}
@GET
@Path("/name/{name}/export")
@Produces(MediaType.TEXT_PLAIN)
@Valid
@Operation(
    operationId = "exportDatabaseServices",
    summary = "Export database service in CSV format",
    responses = {
      @ApiResponse(
          responseCode = "200",
          // Fixed: the export contains the databases under one service, not "services".
          description = "Exported csv with databases from the database service",
          content =
              @Content(
                  mediaType = "application/json",
                  schema = @Schema(implementation = String.class)))
    })
public String exportCsv(
    @Context SecurityContext securityContext,
    @Parameter(description = "Name of the Database Service", schema = @Schema(type = "string"))
        @PathParam("name")
        String name)
    throws IOException {
  // Delegates to the repository's exportToCsv via the shared resource helper,
  // which also enforces authorization for the calling user.
  return exportCsvInternal(securityContext, name);
}
@PUT
@Path("/name/{name}/import")
@Consumes(MediaType.TEXT_PLAIN)
@Valid
@Operation(
    operationId = "importDatabaseService",
    // Fixed: the summary previously said "(no creation allowed)", but this import
    // creates databases that do not yet exist under the service.
    summary =
        "Import from CSV to create/update databases under the database service",
    responses = {
      @ApiResponse(
          responseCode = "200",
          description = "Import result",
          content =
              @Content(
                  mediaType = "application/json",
                  schema = @Schema(implementation = CsvImportResult.class)))
    })
public CsvImportResult importCsv(
    @Context SecurityContext securityContext,
    @Parameter(description = "Name of the Database Service", schema = @Schema(type = "string"))
        @PathParam("name")
        String name,
    @Parameter(
            description =
                "Dry-run when true is used for validating the CSV without really importing it. (default=true)",
            schema = @Schema(type = "boolean"))
        @DefaultValue("true")
        @QueryParam("dryRun")
        boolean dryRun,
    String csv)
    throws IOException {
  // Delegates to the repository's importFromCsv via the shared resource helper.
  return importCsvInternal(securityContext, name, csv, dryRun);
}
@DELETE
@Path("/{id}")
@Operation(

View File

@ -43,6 +43,24 @@
"`PII.Sensitive;PersonalData.Personal`"
]
},
{
"name": "glossaryTerms",
"required": false,
"description": "Fully qualified glossary term names associated with the database schema separated by ';'. Tags derived from the glossary term are automatically applied to the database schema.",
"examples": [
"`Glossary.GlossaryTerm1`",
"`Glossary.GlossaryTerm1.GlossaryTerm2`"
]
},
{
"name": "tiers",
"required": false,
"description": "Fully qualified tier tags names associated with the table separated by ';'.",
"examples": [
"`Tier.Tier1`",
"`Tier.Tier2`"
]
},
{
"name": "retentionPeriod",
"required": false,

View File

@ -43,6 +43,24 @@
"`PII.Sensitive;PersonalData.Personal`"
]
},
{
"name": "glossaryTerms",
"required": false,
"description": "Fully qualified glossary term names associated with the database schema separated by ';'. Tags derived from the glossary term are automatically applied to the database schema.",
"examples": [
"`Glossary.GlossaryTerm1`",
"`Glossary.GlossaryTerm1.GlossaryTerm2`"
]
},
{
"name": "tiers",
"required": false,
"description": "Fully qualified tier tags names associated with the table separated by ';'.",
"examples": [
"`Tier.Tier1`",
"`Tier.Tier2`"
]
},
{
"name": "retentionPeriod",
"required": false,

View File

@ -0,0 +1,73 @@
{
"summary": "Database Service CSV file is used for importing and exporting service metadata from and to an **existing** database service.",
"headers": [
{
"name": "name",
"required": true,
"description": "The name of the database schema being updated.",
"examples": [
"`users`, `customers`"
]
},
{
"name": "displayName",
"required": false,
"description": "Display name for the table.",
"examples": [
"`User Schema`, `Customer Schema`"
]
},
{
"name": "description",
"required": false,
"description": "Description for the database schema in Markdown format.",
"examples": [
"`Customer Schema` that contains all the tables related to customer entity."
]
},
{
"name": "owner",
"required": false,
"description": "Owner names separated by ';'. For team owner, include prefix team. For user owner, include prefix user.",
"examples": [
"`team;marketing`",
"`user;john`"
]
},
{
"name": "tags",
"required": false,
"description": "Fully qualified classification tag names associated with the database schema separated by ';'.. These tags are automatically applied along with the glossary term, when it is used to label an entity.",
"examples": [
"`PII.Sensitive`",
"`PII.Sensitive;PersonalData.Personal`"
]
},
{
"name": "glossaryTerms",
"required": false,
"description": "Fully qualified glossary term names associated with the database schema separated by ';'. Tags derived from the glossary term are automatically applied to the database schema.",
"examples": [
"`Glossary.GlossaryTerm1`",
"`Glossary.GlossaryTerm1.GlossaryTerm2`"
]
},
{
"name": "tiers",
"required": false,
"description": "Fully qualified tier tags names associated with the table separated by ';'.",
"examples": [
"`Tier.Tier1`",
"`Tier.Tier2`"
]
},
{
"name": "domain",
"required": false,
"description": "Domain to which the database schema belongs to",
"examples": [
"Marketing", "Sales"
]
}
]
}

View File

@ -43,6 +43,24 @@
"`PII.Sensitive;PersonalData.Personal`"
]
},
{
"name": "glossaryTerms",
"required": false,
"description": "Fully qualified glossary term names associated with the table, separated by ';'. Tags derived from the glossary terms are automatically applied to the table.",
"examples": [
"`Glossary.GlossaryTerm1`",
"`Glossary.GlossaryTerm1.GlossaryTerm2`"
]
},
{
"name": "tiers",
"required": false,
"description": "Fully qualified tier tag names associated with the table separated by ';'.",
"examples": [
"`Tier.Tier1`",
"`Tier.Tier2`"
]
},
{
"name": "retentionPeriod",
"required": false,
@ -99,6 +117,31 @@
"array<int>", "map<int, string>"
]
},
{
"name": "column.dataType",
"required": false,
"description": "Actual Column data type.",
"examples": [
"BLOB", "DATE"
]
},
{
"name": "column.arrayDataType",
"required": false,
"description": "If the column data type is Array, the data type of the array elements.",
"examples": [
"BLOB", "DATE"
]
},
{
"name": "column.dataLength",
"required": false,
"description": "Data Length of Column in case of CHAR, VARCHAR, BINARY etc.",
"examples": [
"36"
]
},
{
"name": "column.tags",
"required": false,
@ -107,6 +150,15 @@
"`PII.Sensitive`",
"`PII.Sensitive;PersonalData.Personal`"
]
},
{
"name": "column.glossaryTerms",
"required": false,
"description": "Fully qualified glossary term names associated with the column, separated by ';'. Tags automatically derived from these glossary terms will appear in `tags`.",
"examples": [
"`Glossary.GlossaryTerm1`",
"`Glossary.GlossaryTerm1.GlossaryTerm2`"
]
}
]
}

View File

@ -25,6 +25,7 @@ import static org.openmetadata.csv.EntityCsvTest.assertRows;
import static org.openmetadata.csv.EntityCsvTest.assertSummary;
import static org.openmetadata.csv.EntityCsvTest.createCsv;
import static org.openmetadata.csv.EntityCsvTest.getFailedRecord;
import static org.openmetadata.csv.EntityCsvTest.getSuccessRecord;
import static org.openmetadata.service.util.EntityUtil.getFqn;
import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS;
import static org.openmetadata.service.util.TestUtils.assertListNotEmpty;
@ -44,6 +45,7 @@ import org.openmetadata.csv.EntityCsv;
import org.openmetadata.schema.api.data.CreateDatabase;
import org.openmetadata.schema.api.data.CreateDatabaseSchema;
import org.openmetadata.schema.entity.data.Database;
import org.openmetadata.schema.entity.data.DatabaseSchema;
import org.openmetadata.schema.type.ApiStatus;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.csv.CsvImportResult;
@ -117,7 +119,7 @@ public class DatabaseResourceTest extends EntityResourceTest<Database, CreateDat
// Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
// Update databaseSchema with invalid tags field
String resultsHeader = recordToString(EntityCsv.getResultHeaders(DatabaseCsv.HEADERS));
String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,";
String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,,";
String csv = createCsv(DatabaseCsv.HEADERS, listOf(record), null);
CsvImportResult result = importCsv(databaseName, csv, false);
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
@ -127,18 +129,27 @@ public class DatabaseResourceTest extends EntityResourceTest<Database, CreateDat
};
assertRows(result, expectedRows);
// Existing schema can be updated. New schema can't be created.
record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,";
// invalid tag it will give error.
record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,,,";
csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null);
result = importCsv(databaseName, csv, false);
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
String schemaFqn = FullyQualifiedName.add(database.getFullyQualifiedName(), "non-existing");
expectedRows =
new String[] {
resultsHeader,
getFailedRecord(record, entityNotFound(0, Entity.DATABASE_SCHEMA, schemaFqn))
resultsHeader, getFailedRecord(record, entityNotFound(4, "tag", "Tag.invalidTag"))
};
assertRows(result, expectedRows);
// databaseSchema will be created if it does not exist
String schemaFqn = FullyQualifiedName.add(database.getFullyQualifiedName(), "non-existing");
record = "non-existing,dsp1,dsc1,,,,,,,";
csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null);
result = importCsv(databaseName, csv, false);
assertSummary(result, ApiStatus.SUCCESS, 2, 2, 0);
expectedRows = new String[] {resultsHeader, getSuccessRecord(record, "Entity created")};
assertRows(result, expectedRows);
DatabaseSchema createdSchema = schemaTest.getEntityByName(schemaFqn, "id", ADMIN_AUTH_HEADERS);
assertEquals(schemaFqn, createdSchema.getFullyQualifiedName());
}
@Test
@ -150,11 +161,12 @@ public class DatabaseResourceTest extends EntityResourceTest<Database, CreateDat
schemaTest.createRequest("s1").withDatabase(database.getFullyQualifiedName());
schemaTest.createEntity(createSchema, ADMIN_AUTH_HEADERS);
// Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
// Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, retentionPeriod,
// sourceUrl, domain
// Update terms with change in description
String record =
String.format(
"s1,dsp1,new-dsc1,user;%s,Tier.Tier1,P23DT23H,http://test.com,%s",
"s1,dsp1,new-dsc1,user;%s,,,Tier.Tier1,P23DT23H,http://test.com,%s",
user1, escapeCsv(DOMAIN.getFullyQualifiedName()));
// Update created entity with changes

View File

@ -25,6 +25,7 @@ import static org.openmetadata.csv.EntityCsvTest.assertRows;
import static org.openmetadata.csv.EntityCsvTest.assertSummary;
import static org.openmetadata.csv.EntityCsvTest.createCsv;
import static org.openmetadata.csv.EntityCsvTest.getFailedRecord;
import static org.openmetadata.csv.EntityCsvTest.getSuccessRecord;
import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS;
import static org.openmetadata.service.util.TestUtils.assertListNotNull;
import static org.openmetadata.service.util.TestUtils.assertListNull;
@ -118,7 +119,7 @@ class DatabaseSchemaResourceTest extends EntityResourceTest<DatabaseSchema, Crea
// Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
// Create table with invalid tags field
String resultsHeader = recordToString(EntityCsv.getResultHeaders(DatabaseSchemaCsv.HEADERS));
String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,";
String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,,";
String csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null);
CsvImportResult result = importCsv(schemaName, csv, false);
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
@ -128,17 +129,27 @@ class DatabaseSchemaResourceTest extends EntityResourceTest<DatabaseSchema, Crea
};
assertRows(result, expectedRows);
// Existing table can be updated. New table can't be created.
record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,";
// Tag will cause failure
record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,,,";
csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null);
result = importCsv(schemaName, csv, false);
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
String tableFqn = FullyQualifiedName.add(schema.getFullyQualifiedName(), "non-existing");
expectedRows =
new String[] {
resultsHeader, getFailedRecord(record, entityNotFound(0, Entity.TABLE, tableFqn))
resultsHeader, getFailedRecord(record, entityNotFound(4, "tag", "Tag.invalidTag"))
};
assertRows(result, expectedRows);
// Importing a row for a non-existing table creates the table
record = "non-existing,dsp1,dsc1,,,,,,,";
String tableFqn = FullyQualifiedName.add(schema.getFullyQualifiedName(), "non-existing");
csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null);
result = importCsv(schemaName, csv, false);
assertSummary(result, ApiStatus.SUCCESS, 2, 2, 0);
expectedRows = new String[] {resultsHeader, getSuccessRecord(record, "Entity created")};
assertRows(result, expectedRows);
Table table = tableTest.getEntityByName(tableFqn, "id", ADMIN_AUTH_HEADERS);
assertEquals(tableFqn, table.getFullyQualifiedName());
}
@Test
@ -155,7 +166,7 @@ class DatabaseSchemaResourceTest extends EntityResourceTest<DatabaseSchema, Crea
List<String> updateRecords =
listOf(
String.format(
"s1,dsp1,new-dsc1,user;%s,Tier.Tier1,P23DT23H,http://test.com,%s",
"s1,dsp1,new-dsc1,user;%s,,,Tier.Tier1,P23DT23H,http://test.com,%s",
user1, escapeCsv(DOMAIN.getFullyQualifiedName())));
// Update created entity with changes

View File

@ -32,6 +32,7 @@ import static org.openmetadata.csv.EntityCsvTest.assertRows;
import static org.openmetadata.csv.EntityCsvTest.assertSummary;
import static org.openmetadata.csv.EntityCsvTest.createCsv;
import static org.openmetadata.csv.EntityCsvTest.getFailedRecord;
import static org.openmetadata.csv.EntityCsvTest.getSuccessRecord;
import static org.openmetadata.schema.type.ColumnDataType.ARRAY;
import static org.openmetadata.schema.type.ColumnDataType.BIGINT;
import static org.openmetadata.schema.type.ColumnDataType.BINARY;
@ -2294,7 +2295,7 @@ public class TableResourceTest extends EntityResourceTest<Table, CreateTable> {
// Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
// Create table with invalid tags field
String resultsHeader = recordToString(EntityCsv.getResultHeaders(TableCsv.HEADERS));
String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,c1,c1,c1,INT,";
String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,,,c1,c1,c1,,INT,,,,";
String csv = createCsv(TableCsv.HEADERS, listOf(record), null);
CsvImportResult result = importCsv(tableName, csv, false);
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
@ -2306,26 +2307,23 @@ public class TableResourceTest extends EntityResourceTest<Table, CreateTable> {
assertRows(result, expectedRows);
// Add an invalid column tag
record = "s1,dsp1,dsc1,,,,,,c1,,,,Tag.invalidTag";
record = "s1,dsp1,dsc1,,,,,,,,c1,,,,INT,,,Tag.invalidTag,";
csv = createCsv(TableCsv.HEADERS, listOf(record), null);
result = importCsv(tableName, csv, false);
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
expectedRows =
new String[] {
resultsHeader,
getFailedRecord(record, EntityCsv.entityNotFound(12, "tag", "Tag.invalidTag"))
getFailedRecord(record, EntityCsv.entityNotFound(17, "tag", "Tag.invalidTag"))
};
assertRows(result, expectedRows);
// Update a non existing column
record = "s1,dsp1,dsc1,,,,,,nonExistingColumn,,,,";
// Update a non-existing column, this should create a new column with name "nonExistingColumn"
record = "s1,dsp1,dsc1,,,,,,,,nonExistingColumn,,,,INT,,,,";
csv = createCsv(TableCsv.HEADERS, listOf(record), null);
result = importCsv(tableName, csv, false);
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
expectedRows =
new String[] {
resultsHeader, getFailedRecord(record, EntityCsv.columnNotFound(8, "nonExistingColumn"))
};
assertSummary(result, ApiStatus.SUCCESS, 2, 2, 0);
expectedRows = new String[] {resultsHeader, getSuccessRecord(record, "Entity updated")};
assertRows(result, expectedRows);
}
@ -2341,17 +2339,18 @@ public class TableResourceTest extends EntityResourceTest<Table, CreateTable> {
createRequest("s1").withColumns(listOf(c1, c2, c3)).withTableConstraints(null);
Table table = createEntity(createTable, ADMIN_AUTH_HEADERS);
// Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
// Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, retentionPeriod,
// sourceUrl, domain
// Update terms with change in description
List<String> updateRecords =
listOf(
String.format(
"s1,dsp1,new-dsc1,user;%s,Tier.Tier1,P23DT23H,http://test.com,%s,c1,"
+ "dsp1-new,desc1,type,PII.Sensitive",
"s1,dsp1,new-dsc1,user;%s,,,Tier.Tier1,P23DT23H,http://test.com,%s,c1,"
+ "dsp1-new,desc1,type,STRUCT,,,PII.Sensitive,",
user1, escapeCsv(DOMAIN.getFullyQualifiedName())),
",,,,,,,,c1.c11,dsp11-new,desc11,type1,PII.Sensitive",
",,,,,,,,c2,,,,",
",,,,,,,,c3,,,,");
",,,,,,,,,,c1.c11,dsp11-new,desc11,type1,INT,,,PII.Sensitive,",
",,,,,,,,,,c2,,,type1,INT,,,,",
",,,,,,,,,,c3,,,type1,INT,,,,");
// Update created entity with changes
importCsvAndValidate(table.getFullyQualifiedName(), TableCsv.HEADERS, null, updateRecords);

View File

@ -413,10 +413,10 @@ public class GlossaryResourceTest extends EntityResourceTest<Glossary, CreateGlo
List<String> createRecords =
listOf(
String.format(
",g1,dsp1,\"dsc1,1\",h1;h2;h3,,term1;http://term1,Tier.Tier1,%s;%s,user;%s,%s",
",g1,dsp1,\"dsc1,1\",h1;h2;h3,,term1;http://term1,PII.None,%s;%s,user;%s,%s",
user1, user2, user1, "Approved"),
String.format(
",g2,dsp2,dsc3,h1;h3;h3,,term2;https://term2,Tier.Tier2,%s,user;%s,%s",
",g2,dsp2,dsc3,h1;h3;h3,,term2;https://term2,PII.NonSensitive,%s,user;%s,%s",
user1, user2, "Approved"),
String.format(
"importExportTest.g1,g11,dsp2,dsc11,h1;h3;h3,,,,%s,team;%s,%s",
@ -426,10 +426,10 @@ public class GlossaryResourceTest extends EntityResourceTest<Glossary, CreateGlo
List<String> updateRecords =
listOf(
String.format(
",g1,dsp1,new-dsc1,h1;h2;h3,,term1;http://term1,Tier.Tier1,%s;%s,user;%s,%s",
",g1,dsp1,new-dsc1,h1;h2;h3,,term1;http://term1,PII.None,%s;%s,user;%s,%s",
user1, user2, user1, "Approved"),
String.format(
",g2,dsp2,new-dsc3,h1;h3;h3,,term2;https://term2,Tier.Tier2,%s,user;%s,%s",
",g2,dsp2,new-dsc3,h1;h3;h3,,term2;https://term2,PII.NonSensitive,%s,user;%s,%s",
user1, user2, "Approved"),
String.format(
"importExportTest.g1,g11,dsp2,new-dsc11,h1;h3;h3,,,,%s,team;%s,%s",
@ -437,7 +437,7 @@ public class GlossaryResourceTest extends EntityResourceTest<Glossary, CreateGlo
// Add new row to existing rows
List<String> newRecords =
listOf(",g3,dsp0,dsc0,h1;h2;h3,,term0;http://term0,Tier.Tier3,,,Approved");
listOf(",g3,dsp0,dsc0,h1;h2;h3,,term0;http://term0,PII.Sensitive,,,Approved");
testImportExport(
glossary.getName(), GlossaryCsv.HEADERS, createRecords, updateRecords, newRecords);
}