Mirror of https://github.com/open-metadata/OpenMetadata.git (synced 2025-10-31 18:48:35 +00:00)
[Feature] Import/Export For Table, DatabaseSchema, Databases, Service (#15816)

* Add Import/Export separation for GlossaryTerms
* Fixed Table Resource test
* Review comment #2
* Glossary test fix; Glossary does not allow Tier tags
* Database Schema tests fix
* Create Database, DatabaseSchema, DatabaseService entity on import if it does not exist
* Fix tests for Database, DatabaseSchema, Table
This commit is contained in:
parent 61bc8568f8
commit 5a88d15228
@@ -132,7 +132,45 @@ public final class CsvUtil {
     csvRecord.add(
         nullOrEmpty(tags)
             ? null
-            : tags.stream().map(TagLabel::getTagFQN).collect(Collectors.joining(FIELD_SEPARATOR)));
+            : tags.stream()
+                .filter(
+                    tagLabel ->
+                        tagLabel.getSource().equals(TagLabel.TagSource.CLASSIFICATION)
+                            && !tagLabel.getTagFQN().split("\\.")[0].equals("Tier")
+                            && !tagLabel.getLabelType().equals(TagLabel.LabelType.DERIVED))
+                .map(TagLabel::getTagFQN)
+                .collect(Collectors.joining(FIELD_SEPARATOR)));
+
+    return csvRecord;
+  }
+
+  public static List<String> addGlossaryTerms(List<String> csvRecord, List<TagLabel> tags) {
+    csvRecord.add(
+        nullOrEmpty(tags)
+            ? null
+            : tags.stream()
+                .filter(
+                    tagLabel ->
+                        tagLabel.getSource().equals(TagLabel.TagSource.GLOSSARY)
+                            && !tagLabel.getTagFQN().split("\\.")[0].equals("Tier"))
+                .map(TagLabel::getTagFQN)
+                .collect(Collectors.joining(FIELD_SEPARATOR)));
+
+    return csvRecord;
+  }
+
+  public static List<String> addTagTiers(List<String> csvRecord, List<TagLabel> tags) {
+    csvRecord.add(
+        nullOrEmpty(tags)
+            ? null
+            : tags.stream()
+                .filter(
+                    tagLabel ->
+                        tagLabel.getSource().equals(TagLabel.TagSource.CLASSIFICATION)
+                            && tagLabel.getTagFQN().split("\\.")[0].equals("Tier"))
+                .map(TagLabel::getTagFQN)
+                .collect(Collectors.joining(FIELD_SEPARATOR)));
+
     return csvRecord;
   }
 
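To make the new tags/glossaryTerms/tiers column split concrete, here is a small illustrative sketch that is not part of this commit: it feeds one mixed tag list through the three CsvUtil helpers touched above. The tag FQNs are made-up values, and the classification labels are given an explicit MANUAL label type so the DERIVED check has a non-null value to compare against.

```java
import java.util.ArrayList;
import java.util.List;
import org.openmetadata.csv.CsvUtil;
import org.openmetadata.schema.type.TagLabel;

public class TagColumnSplitSketch {
  public static void main(String[] args) {
    List<TagLabel> tags =
        List.of(
            new TagLabel()
                .withSource(TagLabel.TagSource.CLASSIFICATION)
                .withLabelType(TagLabel.LabelType.MANUAL)
                .withTagFQN("PII.Sensitive"), // hypothetical classification tag
            new TagLabel()
                .withSource(TagLabel.TagSource.GLOSSARY)
                .withLabelType(TagLabel.LabelType.MANUAL)
                .withTagFQN("Glossary.CustomerData"), // hypothetical glossary term
            new TagLabel()
                .withSource(TagLabel.TagSource.CLASSIFICATION)
                .withLabelType(TagLabel.LabelType.MANUAL)
                .withTagFQN("Tier.Tier1")); // hypothetical tier tag

    List<String> row = new ArrayList<>();
    CsvUtil.addTagLabels(row, tags); // "tags" cell          -> "PII.Sensitive"
    CsvUtil.addGlossaryTerms(row, tags); // "glossaryTerms" cell -> "Glossary.CustomerData"
    CsvUtil.addTagTiers(row, tags); // "tiers" cell         -> "Tier.Tier1"
    System.out.println(row); // [PII.Sensitive, Glossary.CustomerData, Tier.Tier1]
  }
}
```

Each helper appends exactly one cell, so a single tag list fans out into three separate CSV columns on export.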

@@ -34,6 +34,7 @@ import org.apache.commons.csv.CSVFormat;
 import org.apache.commons.csv.CSVFormat.Builder;
 import org.apache.commons.csv.CSVPrinter;
 import org.apache.commons.csv.CSVRecord;
+import org.apache.commons.lang3.tuple.Pair;
 import org.jdbi.v3.sqlobject.transaction.Transaction;
 import org.openmetadata.common.utils.CommonUtil;
 import org.openmetadata.schema.EntityInterface;
@@ -275,20 +276,26 @@ public abstract class EntityCsv<T extends EntityInterface> {
   }
 
   protected final List<TagLabel> getTagLabels(
-      CSVPrinter printer, CSVRecord csvRecord, int fieldNumber) throws IOException {
+      CSVPrinter printer,
+      CSVRecord csvRecord,
+      List<Pair<Integer, TagSource>> fieldNumbersWithSource)
+      throws IOException {
     if (!processRecord) {
       return null;
     }
-    List<EntityReference> refs = getEntityReferences(printer, csvRecord, fieldNumber, Entity.TAG);
-    if (!processRecord || nullOrEmpty(refs)) {
-      return null;
-    }
     List<TagLabel> tagLabels = new ArrayList<>();
-    for (EntityReference ref : refs) {
-      tagLabels.add(
-          new TagLabel()
-              .withSource(TagSource.CLASSIFICATION)
-              .withTagFQN(ref.getFullyQualifiedName()));
+    for (Pair<Integer, TagSource> pair : fieldNumbersWithSource) {
+      int fieldNumbers = pair.getLeft();
+      TagSource source = pair.getRight();
+      List<EntityReference> refs =
+          source == TagSource.CLASSIFICATION
+              ? getEntityReferences(printer, csvRecord, fieldNumbers, Entity.TAG)
+              : getEntityReferences(printer, csvRecord, fieldNumbers, Entity.GLOSSARY_TERM);
+      if (processRecord && !nullOrEmpty(refs)) {
+        for (EntityReference ref : refs) {
+          tagLabels.add(new TagLabel().withSource(source).withTagFQN(ref.getFullyQualifiedName()));
+        }
+      }
     }
     return tagLabels;
   }
@@ -391,6 +398,7 @@ public abstract class EntityCsv<T extends EntityInterface> {
         responseStatus = response.getStatus();
       } catch (Exception ex) {
         importFailure(resultsPrinter, ex.getMessage(), csvRecord);
+        importResult.setStatus(ApiStatus.FAILURE);
         return;
       }
     } else { // Dry run don't create the entity

@@ -14,8 +14,10 @@
 package org.openmetadata.service.jdbi3;
 
 import static org.openmetadata.csv.CsvUtil.addField;
+import static org.openmetadata.csv.CsvUtil.addGlossaryTerms;
 import static org.openmetadata.csv.CsvUtil.addOwner;
 import static org.openmetadata.csv.CsvUtil.addTagLabels;
+import static org.openmetadata.csv.CsvUtil.addTagTiers;
 import static org.openmetadata.service.Entity.DATABASE_SCHEMA;
 
 import java.io.IOException;
@@ -26,6 +28,7 @@ import java.util.UUID;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.csv.CSVPrinter;
 import org.apache.commons.csv.CSVRecord;
+import org.apache.commons.lang3.tuple.Pair;
 import org.jdbi.v3.sqlobject.transaction.Transaction;
 import org.openmetadata.csv.EntityCsv;
 import org.openmetadata.schema.EntityInterface;
@@ -36,6 +39,7 @@ import org.openmetadata.schema.type.DatabaseProfilerConfig;
 import org.openmetadata.schema.type.EntityReference;
 import org.openmetadata.schema.type.Include;
 import org.openmetadata.schema.type.Relationship;
+import org.openmetadata.schema.type.TagLabel;
 import org.openmetadata.schema.type.csv.CsvDocumentation;
 import org.openmetadata.schema.type.csv.CsvFile;
 import org.openmetadata.schema.type.csv.CsvHeader;
@@ -116,7 +120,12 @@ public class DatabaseRepository extends EntityRepository<Database> {
   @Override
   public CsvImportResult importFromCsv(String name, String csv, boolean dryRun, String user)
       throws IOException {
-    Database database = getByName(null, name, Fields.EMPTY_FIELDS); // Validate glossary name
+    Database database =
+        getByName(
+            null,
+            name,
+            getFields(
+                "service")); // Validate glossary name, and get service needed in case of create
     DatabaseCsv databaseCsv = new DatabaseCsv(database, user);
     return databaseCsv.importCsv(csv, dryRun);
   }
@@ -234,22 +243,33 @@ public class DatabaseRepository extends EntityRepository<Database> {
       try {
         schema = Entity.getEntityByName(DATABASE_SCHEMA, schemaFqn, "*", Include.NON_DELETED);
       } catch (Exception ex) {
-        importFailure(printer, entityNotFound(0, DATABASE_SCHEMA, schemaFqn), csvRecord);
-        processRecord = false;
-        return;
+        LOG.warn("Database Schema not found: {}, it will be created with Import.", schemaFqn);
+        schema =
+            new DatabaseSchema()
+                .withDatabase(database.getEntityReference())
+                .withService(database.getService());
       }
 
-      // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
+      // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers retentionPeriod,
+      // sourceUrl, domain
       // Field 1,2,3,6,7 - database schema name, displayName, description
+      List<TagLabel> tagLabels =
+          getTagLabels(
+              printer,
+              csvRecord,
+              List.of(
+                  Pair.of(4, TagLabel.TagSource.CLASSIFICATION),
+                  Pair.of(5, TagLabel.TagSource.GLOSSARY),
+                  Pair.of(6, TagLabel.TagSource.CLASSIFICATION)));
       schema
           .withName(csvRecord.get(0))
           .withDisplayName(csvRecord.get(1))
          .withDescription(csvRecord.get(2))
          .withOwner(getOwner(printer, csvRecord, 3))
-          .withTags(getTagLabels(printer, csvRecord, 4))
-          .withRetentionPeriod(csvRecord.get(5))
-          .withSourceUrl(csvRecord.get(6))
-          .withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN));
+          .withTags(tagLabels)
+          .withRetentionPeriod(csvRecord.get(7))
+          .withSourceUrl(csvRecord.get(8))
+          .withDomain(getEntityReference(printer, csvRecord, 9, Entity.DOMAIN));
       if (processRecord) {
         createEntity(printer, csvRecord, schema);
       }
@@ -264,6 +284,8 @@ public class DatabaseRepository extends EntityRepository<Database> {
       addField(recordList, entity.getDescription());
       addOwner(recordList, entity.getOwner());
       addTagLabels(recordList, entity.getTags());
+      addGlossaryTerms(recordList, entity.getTags());
+      addTagTiers(recordList, entity.getTags());
       addField(recordList, entity.getRetentionPeriod());
       addField(recordList, entity.getSourceUrl());
       String domain =

@@ -14,8 +14,10 @@
 package org.openmetadata.service.jdbi3;
 
 import static org.openmetadata.csv.CsvUtil.addField;
+import static org.openmetadata.csv.CsvUtil.addGlossaryTerms;
 import static org.openmetadata.csv.CsvUtil.addOwner;
 import static org.openmetadata.csv.CsvUtil.addTagLabels;
+import static org.openmetadata.csv.CsvUtil.addTagTiers;
 import static org.openmetadata.schema.type.Include.ALL;
 import static org.openmetadata.service.Entity.DATABASE_SCHEMA;
 import static org.openmetadata.service.Entity.TABLE;
@@ -29,6 +31,7 @@ import java.util.UUID;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.csv.CSVPrinter;
 import org.apache.commons.csv.CSVRecord;
+import org.apache.commons.lang3.tuple.Pair;
 import org.jdbi.v3.sqlobject.transaction.Transaction;
 import org.openmetadata.csv.EntityCsv;
 import org.openmetadata.schema.EntityInterface;
@@ -39,6 +42,7 @@ import org.openmetadata.schema.type.DatabaseSchemaProfilerConfig;
 import org.openmetadata.schema.type.EntityReference;
 import org.openmetadata.schema.type.Include;
 import org.openmetadata.schema.type.Relationship;
+import org.openmetadata.schema.type.TagLabel;
 import org.openmetadata.schema.type.csv.CsvDocumentation;
 import org.openmetadata.schema.type.csv.CsvFile;
 import org.openmetadata.schema.type.csv.CsvHeader;
@@ -188,7 +192,8 @@ public class DatabaseSchemaRepository extends EntityRepository<DatabaseSchema> {
   @Override
   public CsvImportResult importFromCsv(String name, String csv, boolean dryRun, String user)
       throws IOException {
-    DatabaseSchema schema = getByName(null, name, Fields.EMPTY_FIELDS); // Validate database schema
+    DatabaseSchema schema =
+        getByName(null, name, getFields("database,service")); // Validate database schema
     return new DatabaseSchemaCsv(schema, user).importCsv(csv, dryRun);
   }
 
@@ -266,21 +271,35 @@ public class DatabaseSchemaRepository extends EntityRepository<DatabaseSchema> {
       try {
         table = Entity.getEntityByName(TABLE, tableFqn, "*", Include.NON_DELETED);
       } catch (Exception ex) {
-        importFailure(printer, entityNotFound(0, TABLE, tableFqn), csvRecord);
-        processRecord = false;
-        return;
+        LOG.warn("Table not found: {}, it will be created with Import.", tableFqn);
+        table =
+            new Table()
+                .withService(schema.getService())
+                .withDatabase(schema.getDatabase())
+                .withDatabaseSchema(schema.getEntityReference());
       }
 
-      // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
+      // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers retentionPeriod,
+      // sourceUrl, domain
       // Field 1,2,3,6,7 - database schema name, displayName, description
+      List<TagLabel> tagLabels =
+          getTagLabels(
+              printer,
+              csvRecord,
+              List.of(
+                  Pair.of(4, TagLabel.TagSource.CLASSIFICATION),
+                  Pair.of(5, TagLabel.TagSource.GLOSSARY),
+                  Pair.of(6, TagLabel.TagSource.CLASSIFICATION)));
       table
+          .withName(csvRecord.get(0))
          .withDisplayName(csvRecord.get(1))
          .withDescription(csvRecord.get(2))
          .withOwner(getOwner(printer, csvRecord, 3))
-          .withTags(getTagLabels(printer, csvRecord, 4))
-          .withRetentionPeriod(csvRecord.get(5))
-          .withSourceUrl(csvRecord.get(6))
-          .withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN));
+          .withTags(tagLabels)
+          .withRetentionPeriod(csvRecord.get(7))
+          .withSourceUrl(csvRecord.get(8))
+          .withColumns(new ArrayList<>())
+          .withDomain(getEntityReference(printer, csvRecord, 9, Entity.DOMAIN));
 
       if (processRecord) {
         createEntity(printer, csvRecord, table);
@@ -296,6 +315,8 @@ public class DatabaseSchemaRepository extends EntityRepository<DatabaseSchema> {
       addField(recordList, entity.getDescription());
       addOwner(recordList, entity.getOwner());
       addTagLabels(recordList, entity.getTags());
+      addGlossaryTerms(recordList, entity.getTags());
+      addTagTiers(recordList, entity.getTags());
       addField(recordList, entity.getRetentionPeriod());
       addField(recordList, entity.getSourceUrl());
       String domain =

@@ -13,12 +13,39 @@
 
 package org.openmetadata.service.jdbi3;
 
+import static org.openmetadata.csv.CsvUtil.addField;
+import static org.openmetadata.csv.CsvUtil.addGlossaryTerms;
+import static org.openmetadata.csv.CsvUtil.addOwner;
+import static org.openmetadata.csv.CsvUtil.addTagLabels;
+import static org.openmetadata.csv.CsvUtil.addTagTiers;
+import static org.openmetadata.service.Entity.DATABASE;
+import static org.openmetadata.service.Entity.DATABASE_SERVICE;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
 import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.csv.CSVPrinter;
+import org.apache.commons.csv.CSVRecord;
+import org.apache.commons.lang3.tuple.Pair;
+import org.openmetadata.csv.EntityCsv;
+import org.openmetadata.schema.EntityInterface;
 import org.openmetadata.schema.api.services.DatabaseConnection;
+import org.openmetadata.schema.entity.data.Database;
 import org.openmetadata.schema.entity.services.DatabaseService;
 import org.openmetadata.schema.entity.services.ServiceType;
+import org.openmetadata.schema.type.Include;
+import org.openmetadata.schema.type.TagLabel;
+import org.openmetadata.schema.type.csv.CsvDocumentation;
+import org.openmetadata.schema.type.csv.CsvFile;
+import org.openmetadata.schema.type.csv.CsvHeader;
+import org.openmetadata.schema.type.csv.CsvImportResult;
 import org.openmetadata.service.Entity;
+import org.openmetadata.service.exception.EntityNotFoundException;
 import org.openmetadata.service.resources.services.database.DatabaseServiceResource;
+import org.openmetadata.service.util.EntityUtil;
+import org.openmetadata.service.util.FullyQualifiedName;
 
 @Slf4j
 public class DatabaseServiceRepository
@@ -33,4 +60,92 @@ public class DatabaseServiceRepository
         ServiceType.DATABASE);
     supportsSearch = true;
   }
+
+  @Override
+  public String exportToCsv(String name, String user) throws IOException {
+    DatabaseService databaseService =
+        getByName(null, name, EntityUtil.Fields.EMPTY_FIELDS); // Validate database name
+    DatabaseRepository repository = (DatabaseRepository) Entity.getEntityRepository(DATABASE);
+    ListFilter filter = new ListFilter(Include.NON_DELETED).addQueryParam("service", name);
+    List<Database> databases =
+        repository.listAll(repository.getFields("owner,tags,domain"), filter);
+    databases.sort(Comparator.comparing(EntityInterface::getFullyQualifiedName));
+    return new DatabaseServiceCsv(databaseService, user).exportCsv(databases);
+  }
+
+  @Override
+  public CsvImportResult importFromCsv(String name, String csv, boolean dryRun, String user)
+      throws IOException {
+    // Validate database service
+    DatabaseService databaseService =
+        getByName(null, name, EntityUtil.Fields.EMPTY_FIELDS); // Validate glossary name
+    DatabaseServiceCsv databaseServiceCsv = new DatabaseServiceCsv(databaseService, user);
+    return databaseServiceCsv.importCsv(csv, dryRun);
+  }
+
+  public static class DatabaseServiceCsv extends EntityCsv<Database> {
+    public static final CsvDocumentation DOCUMENTATION = getCsvDocumentation(DATABASE_SERVICE);
+    public static final List<CsvHeader> HEADERS = DOCUMENTATION.getHeaders();
+    private final DatabaseService service;
+
+    DatabaseServiceCsv(DatabaseService service, String user) {
+      super(DATABASE, DOCUMENTATION.getHeaders(), user);
+      this.service = service;
+    }
+
+    @Override
+    protected void createEntity(CSVPrinter printer, List<CSVRecord> csvRecords) throws IOException {
+      CSVRecord csvRecord = getNextRecord(printer, csvRecords);
+      String databaseFqn =
+          FullyQualifiedName.add(service.getFullyQualifiedName(), csvRecord.get(0));
+      Database database;
+      try {
+        database = Entity.getEntityByName(DATABASE, databaseFqn, "*", Include.NON_DELETED);
+      } catch (EntityNotFoundException ex) {
+        LOG.warn("Database not found: {}, it will be created with Import.", databaseFqn);
+        database = new Database().withService(service.getEntityReference());
+      }
+
+      // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, domain
+      // Field 1,2,3,6,7 - database service name, displayName, description
+      List<TagLabel> tagLabels =
+          getTagLabels(
+              printer,
+              csvRecord,
+              List.of(
+                  Pair.of(4, TagLabel.TagSource.CLASSIFICATION),
+                  Pair.of(5, TagLabel.TagSource.GLOSSARY),
+                  Pair.of(6, TagLabel.TagSource.CLASSIFICATION)));
+      database
+          .withName(csvRecord.get(0))
+          .withDisplayName(csvRecord.get(1))
+          .withDescription(csvRecord.get(2))
+          .withOwner(getOwner(printer, csvRecord, 3))
+          .withTags(tagLabels)
+          .withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN));
+
+      if (processRecord) {
+        createEntity(printer, csvRecord, database);
+      }
+    }
+
+    @Override
+    protected void addRecord(CsvFile csvFile, Database entity) {
+      // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, domain
+      List<String> recordList = new ArrayList<>();
+      addField(recordList, entity.getName());
+      addField(recordList, entity.getDisplayName());
+      addField(recordList, entity.getDescription());
+      addOwner(recordList, entity.getOwner());
+      addTagLabels(recordList, entity.getTags());
+      addGlossaryTerms(recordList, entity.getTags());
+      addTagTiers(recordList, entity.getTags());
+      String domain =
+          entity.getDomain() == null || Boolean.TRUE.equals(entity.getDomain().getInherited())
+              ? ""
+              : entity.getDomain().getFullyQualifiedName();
+      addField(recordList, domain);
+      addRecord(csvFile, recordList);
+    }
+  }
 }
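For orientation, a hedged sketch of the CSV shape that the DatabaseServiceCsv above exports and imports, one row per database of the service. The header order follows the databaseServiceCsvDocumentation.json added later in this commit; every value below is invented.

```java
// Illustrative only: not produced by this commit's tests, just the documented column order
// with hypothetical values ("ecommerce_db", "user;john", etc.).
public class DatabaseServiceCsvShape {
  public static void main(String[] args) {
    String header = "name,displayName,description,owner,tags,glossaryTerms,tiers,domain";
    String row =
        "ecommerce_db,Ecommerce DB,Orders and customers,user;john,"
            + "PII.Sensitive,Glossary.CustomerData,Tier.Tier1,Marketing";
    System.out.println(header);
    System.out.println(row);
  }
}
```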

@@ -38,6 +38,7 @@ import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.csv.CSVPrinter;
 import org.apache.commons.csv.CSVRecord;
 import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.commons.lang3.tuple.Pair;
 import org.jdbi.v3.sqlobject.transaction.Transaction;
 import org.openmetadata.csv.CsvUtil;
 import org.openmetadata.csv.EntityCsv;
@@ -50,6 +51,7 @@ import org.openmetadata.schema.type.EntityReference;
 import org.openmetadata.schema.type.Include;
 import org.openmetadata.schema.type.ProviderType;
 import org.openmetadata.schema.type.Relationship;
+import org.openmetadata.schema.type.TagLabel;
 import org.openmetadata.schema.type.TagLabel.TagSource;
 import org.openmetadata.schema.type.csv.CsvDocumentation;
 import org.openmetadata.schema.type.csv.CsvFile;
@@ -179,7 +181,9 @@ public class GlossaryRepository extends EntityRepository<Glossary> {
           .withSynonyms(CsvUtil.fieldToStrings(csvRecord.get(4)))
           .withRelatedTerms(getEntityReferences(printer, csvRecord, 5, GLOSSARY_TERM))
           .withReferences(getTermReferences(printer, csvRecord))
-          .withTags(getTagLabels(printer, csvRecord, 7))
+          .withTags(
+              getTagLabels(
+                  printer, csvRecord, List.of(Pair.of(7, TagLabel.TagSource.CLASSIFICATION))))
           .withReviewers(getEntityReferences(printer, csvRecord, 8, Entity.USER))
           .withOwner(getOwner(printer, csvRecord, 9))
           .withStatus(getTermStatus(printer, csvRecord));

@@ -16,9 +16,12 @@ package org.openmetadata.service.jdbi3;
 import static java.util.stream.Collectors.groupingBy;
 import static org.openmetadata.common.utils.CommonUtil.listOf;
 import static org.openmetadata.common.utils.CommonUtil.listOrEmpty;
+import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty;
 import static org.openmetadata.csv.CsvUtil.addField;
+import static org.openmetadata.csv.CsvUtil.addGlossaryTerms;
 import static org.openmetadata.csv.CsvUtil.addOwner;
 import static org.openmetadata.csv.CsvUtil.addTagLabels;
+import static org.openmetadata.csv.CsvUtil.addTagTiers;
 import static org.openmetadata.schema.type.Include.ALL;
 import static org.openmetadata.schema.type.Include.NON_DELETED;
 import static org.openmetadata.service.Entity.DATABASE_SCHEMA;
@@ -35,6 +38,7 @@ import static org.openmetadata.service.util.LambdaExceptionUtil.rethrowFunction;
 import com.google.common.collect.Streams;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.Date;
 import java.util.List;
@@ -62,6 +66,7 @@ import org.openmetadata.schema.entity.feed.Suggestion;
 import org.openmetadata.schema.tests.CustomMetric;
 import org.openmetadata.schema.tests.TestSuite;
 import org.openmetadata.schema.type.Column;
+import org.openmetadata.schema.type.ColumnDataType;
 import org.openmetadata.schema.type.ColumnJoin;
 import org.openmetadata.schema.type.ColumnProfile;
 import org.openmetadata.schema.type.ColumnProfilerConfig;
@@ -774,7 +779,11 @@ public class TableRepository extends EntityRepository<Table> {
   public CsvImportResult importFromCsv(String name, String csv, boolean dryRun, String user)
       throws IOException {
     // Validate table
-    Table table = getByName(null, name, new Fields(allowedFields, "owner,domain,tags,columns"));
+    Table table =
+        getByName(
+            null,
+            name,
+            new Fields(allowedFields, "owner,domain,tags,columns,database,service,databaseSchema"));
     return new TableCsv(table, user).importCsv(csv, dryRun);
   }
 
@@ -1157,19 +1166,29 @@
     @Override
     protected void createEntity(CSVPrinter printer, List<CSVRecord> csvRecords) throws IOException {
       CSVRecord csvRecord = getNextRecord(printer, csvRecords);
-      // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
-      // column.fullyQualifiedName, column.displayName, column.description, column.dataTypeDisplay,
-      // column.tags
+      // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers retentionPeriod,
+      // sourceUrl, domain, column.fullyQualifiedName, column.displayName, column.description,
+      // column.dataTypeDisplay,
+      // column.tags, column.glossaryTerms
       if (processRecord) {
+        // fields tags(4), glossaryTerms(5), tiers(6)
+        List<TagLabel> tagLabels =
+            getTagLabels(
+                printer,
+                csvRecord,
+                List.of(
+                    Pair.of(4, TagLabel.TagSource.CLASSIFICATION),
+                    Pair.of(5, TagLabel.TagSource.GLOSSARY),
+                    Pair.of(6, TagLabel.TagSource.CLASSIFICATION)));
         table
             .withName(csvRecord.get(0))
            .withDisplayName(csvRecord.get(1))
            .withDescription(csvRecord.get(2))
            .withOwner(getOwner(printer, csvRecord, 3))
-            .withTags(getTagLabels(printer, csvRecord, 4))
-            .withRetentionPeriod(csvRecord.get(5))
-            .withSourceUrl(csvRecord.get(6))
-            .withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN));
+            .withTags(tagLabels != null && tagLabels.isEmpty() ? null : tagLabels)
+            .withRetentionPeriod(csvRecord.get(7))
+            .withSourceUrl(csvRecord.get(8))
+            .withDomain(getEntityReference(printer, csvRecord, 9, Entity.DOMAIN));
         ImportResult importResult = updateColumn(printer, csvRecord);
         if (importResult.result().equals(IMPORT_FAILED)) {
           importFailure(printer, importResult.details(), csvRecord);
@@ -1202,16 +1221,71 @@
       if (!processRecord) {
         return new ImportResult(IMPORT_SKIPPED, csvRecord, "");
       }
-      String columnFqn = csvRecord.get(8);
+      String columnFqn = csvRecord.get(10);
       Column column = findColumn(table.getColumns(), columnFqn);
+      boolean columnExists = column != null;
       if (column == null) {
-        processRecord = false;
-        return new ImportResult(IMPORT_FAILED, csvRecord, columnNotFound(8, columnFqn));
+        // Create Column, if not found
+        column =
+            new Column()
+                .withName(getLocalColumnName(table.getFullyQualifiedName(), columnFqn))
+                .withFullyQualifiedName(
+                    table.getFullyQualifiedName() + Entity.SEPARATOR + columnFqn);
       }
-      column.withDisplayName(csvRecord.get(9));
-      column.withDescription(csvRecord.get(10));
-      column.withDataTypeDisplay(csvRecord.get(11));
-      column.withTags(getTagLabels(printer, csvRecord, 12));
+      column.withDisplayName(csvRecord.get(11));
+      column.withDescription(csvRecord.get(12));
+      column.withDataTypeDisplay(csvRecord.get(13));
+      column.withDataType(
+          nullOrEmpty(csvRecord.get(14)) ? null : ColumnDataType.fromValue(csvRecord.get(14)));
+      column.withArrayDataType(
+          nullOrEmpty(csvRecord.get(15)) ? null : ColumnDataType.fromValue(csvRecord.get(15)));
+      column.withDataLength(
+          nullOrEmpty(csvRecord.get(16)) ? null : Integer.parseInt(csvRecord.get(16)));
+      List<TagLabel> tagLabels =
+          getTagLabels(
+              printer,
+              csvRecord,
+              List.of(
+                  Pair.of(17, TagLabel.TagSource.CLASSIFICATION),
+                  Pair.of(18, TagLabel.TagSource.GLOSSARY)));
+      column.withTags(nullOrEmpty(tagLabels) ? null : tagLabels);
+      column.withOrdinalPosition(nullOrEmpty(table.getColumns()) ? 0 : table.getColumns().size());
+
+      // If Column Does not Exist add it to the table
+      if (!columnExists) {
+        String[] splitColumnName = FullyQualifiedName.split(columnFqn);
+        // Parent Column
+        if (splitColumnName.length == 1) {
+          List<Column> tableColumns =
+              table.getColumns() == null ? new ArrayList<>() : table.getColumns();
+          tableColumns.add(column);
+          table.withColumns(tableColumns);
+        } else {
+          String parentColumnFqn =
+              String.join(
+                  Entity.SEPARATOR, Arrays.copyOf(splitColumnName, splitColumnName.length - 1));
+          Column parentColumn = findColumn(table.getColumns(), parentColumnFqn);
+          if (parentColumn == null) {
+            return new ImportResult(
+                IMPORT_FAILED,
+                csvRecord,
+                "Parent Column not found. Check the order of the columns in the CSV file.");
+          }
+
+          // Update Name And Ordinal position in the parent column
+          column.withName(splitColumnName[splitColumnName.length - 1]);
+          column.withOrdinalPosition(
+              nullOrEmpty(parentColumn.getChildren()) ? 0 : parentColumn.getChildren().size());
+          // Add this column to children of Parent
+          List<Column> children =
+              nullOrEmpty(parentColumn.getChildren())
+                  ? new ArrayList<>()
+                  : parentColumn.getChildren();
+          children.add(column);
+          parentColumn.withChildren(children);
+        }
+      }
+
       return new ImportResult(IMPORT_SUCCESS, csvRecord, ENTITY_UPDATED);
     }
 
@@ -1226,6 +1300,8 @@
       addField(recordList, entity.getDescription());
       addOwner(recordList, entity.getOwner());
       addTagLabels(recordList, entity.getTags());
+      addGlossaryTerms(recordList, entity.getTags());
+      addTagTiers(recordList, entity.getTags());
       addField(recordList, entity.getRetentionPeriod());
       addField(recordList, entity.getSourceUrl());
       String domain =
@@ -1243,7 +1319,7 @@
     private void addRecord(
         CsvFile csvFile, List<String> recordList, Column column, boolean emptyTableDetails) {
       if (emptyTableDetails) {
-        for (int i = 0; i < 8; i++) {
+        for (int i = 0; i < 10; i++) {
           addField(recordList, (String) null); // Add empty fields for table information
         }
       }
@@ -1253,7 +1329,14 @@
       addField(recordList, column.getDisplayName());
       addField(recordList, column.getDescription());
       addField(recordList, column.getDataTypeDisplay());
+      addField(recordList, column.getDataType() == null ? null : column.getDataType().value());
+      addField(
+          recordList, column.getArrayDataType() == null ? null : column.getArrayDataType().value());
+      addField(
+          recordList,
+          column.getDataLength() == null ? null : String.valueOf(column.getDataLength()));
       addTagLabels(recordList, column.getTags());
+      addGlossaryTerms(recordList, column.getTags());
       addRecord(csvFile, recordList);
       listOrEmpty(column.getChildren())
           .forEach(c -> addRecord(csvFile, new ArrayList<>(), c, true));
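Because several field indexes above changed (table-level fields now occupy columns 0 to 9 and column-level fields run from 10 to 18), here is a reference sketch that is not part of the commit: it lists the header order TableCsv now assumes, derived from the csvRecord.get(n) calls above and the updated table CSV documentation.

```java
import java.util.List;

public class TableCsvHeaderOrder {
  // Reading aid only: header order implied by the TableCsv code in this diff.
  // Indexes in the comments match the csvRecord.get(n) calls above.
  static final List<String> HEADERS =
      List.of(
          "name", // 0
          "displayName", // 1
          "description", // 2
          "owner", // 3
          "tags", // 4
          "glossaryTerms", // 5
          "tiers", // 6
          "retentionPeriod", // 7
          "sourceUrl", // 8
          "domain", // 9
          "column.fullyQualifiedName", // 10
          "column.displayName", // 11
          "column.description", // 12
          "column.dataTypeDisplay", // 13
          "column.dataType", // 14
          "column.arrayDataType", // 15
          "column.dataLength", // 16
          "column.tags", // 17
          "column.glossaryTerms"); // 18

  public static void main(String[] args) {
    HEADERS.forEach(System.out::println);
  }
}
```

This also explains the `for (int i = 0; i < 10; i++)` padding change above: column-only rows now skip ten table-level cells instead of eight.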

@@ -22,6 +22,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
 import io.swagger.v3.oas.annotations.parameters.RequestBody;
 import io.swagger.v3.oas.annotations.responses.ApiResponse;
 import io.swagger.v3.oas.annotations.tags.Tag;
+import java.io.IOException;
 import java.util.List;
 import java.util.UUID;
 import java.util.stream.Collectors;
@@ -55,6 +56,7 @@ import org.openmetadata.schema.entity.services.connections.TestConnectionResult;
 import org.openmetadata.schema.type.EntityHistory;
 import org.openmetadata.schema.type.Include;
 import org.openmetadata.schema.type.MetadataOperation;
+import org.openmetadata.schema.type.csv.CsvImportResult;
 import org.openmetadata.service.Entity;
 import org.openmetadata.service.jdbi3.DatabaseServiceRepository;
 import org.openmetadata.service.resources.Collection;
@@ -408,6 +410,64 @@ public class DatabaseServiceResource
     return patchInternal(uriInfo, securityContext, id, patch);
   }
 
+  @GET
+  @Path("/name/{name}/export")
+  @Produces(MediaType.TEXT_PLAIN)
+  @Valid
+  @Operation(
+      operationId = "exportDatabaseServices",
+      summary = "Export database service in CSV format",
+      responses = {
+        @ApiResponse(
+            responseCode = "200",
+            description = "Exported csv with services from the database services",
+            content =
+                @Content(
+                    mediaType = "application/json",
+                    schema = @Schema(implementation = String.class)))
+      })
+  public String exportCsv(
+      @Context SecurityContext securityContext,
+      @Parameter(description = "Name of the Database Service", schema = @Schema(type = "string"))
+          @PathParam("name")
+          String name)
+      throws IOException {
+    return exportCsvInternal(securityContext, name);
+  }
+
+  @PUT
+  @Path("/name/{name}/import")
+  @Consumes(MediaType.TEXT_PLAIN)
+  @Valid
+  @Operation(
+      operationId = "importDatabaseService",
+      summary = "Import service from CSV to update database service (no creation allowed)",
+      responses = {
+        @ApiResponse(
+            responseCode = "200",
+            description = "Import result",
+            content =
+                @Content(
+                    mediaType = "application/json",
+                    schema = @Schema(implementation = CsvImportResult.class)))
+      })
+  public CsvImportResult importCsv(
+      @Context SecurityContext securityContext,
+      @Parameter(description = "Name of the Database Service", schema = @Schema(type = "string"))
+          @PathParam("name")
+          String name,
+      @Parameter(
+              description =
+                  "Dry-run when true is used for validating the CSV without really importing it. (default=true)",
+              schema = @Schema(type = "boolean"))
+          @DefaultValue("true")
+          @QueryParam("dryRun")
+          boolean dryRun,
+      String csv)
+      throws IOException {
+    return importCsvInternal(securityContext, name, csv, dryRun);
+  }
+
   @DELETE
   @Path("/{id}")
   @Operation(
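A hedged client-side sketch of how the two new endpoints above could be exercised with java.net.http. The base URL, the /api/v1/services/databaseServices collection path, the service name "mysql_prod", and the bearer token are all assumptions for illustration, not something this commit defines.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DatabaseServiceCsvRoundTrip {
  public static void main(String[] args) throws Exception {
    String base = "http://localhost:8585/api/v1/services/databaseServices"; // assumed deployment URL
    String token = System.getenv("OM_JWT"); // assumed auth token
    HttpClient client = HttpClient.newHttpClient();

    // Export the databases of the (hypothetical) service "mysql_prod" as CSV text
    HttpRequest export =
        HttpRequest.newBuilder(URI.create(base + "/name/mysql_prod/export"))
            .header("Authorization", "Bearer " + token)
            .GET()
            .build();
    String csv = client.send(export, HttpResponse.BodyHandlers.ofString()).body();

    // Validate the same CSV as a dry run first (dryRun defaults to true on the server)
    HttpRequest dryRun =
        HttpRequest.newBuilder(URI.create(base + "/name/mysql_prod/import?dryRun=true"))
            .header("Authorization", "Bearer " + token)
            .header("Content-Type", "text/plain")
            .PUT(HttpRequest.BodyPublishers.ofString(csv))
            .build();
    System.out.println(client.send(dryRun, HttpResponse.BodyHandlers.ofString()).body());
  }
}
```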

@@ -43,6 +43,24 @@
         "`PII.Sensitive;PersonalData.Personal`"
       ]
     },
+    {
+      "name": "glossaryTerms",
+      "required": false,
+      "description": "Fully qualified glossary term names associated with the database schema separated by ';'. Tags derived from the glossary term are automatically applied to the database schema.",
+      "examples": [
+        "`Glossary.GlossaryTerm1`",
+        "`Glossary.GlossaryTerm1.GlossaryTerm2`"
+      ]
+    },
+    {
+      "name": "tiers",
+      "required": false,
+      "description": "Fully qualified tier tags names associated with the table separated by ';'.",
+      "examples": [
+        "`Tier.Tier1`",
+        "`Tier.Tier2`"
+      ]
+    },
     {
       "name": "retentionPeriod",
       "required": false,

@@ -43,6 +43,24 @@
         "`PII.Sensitive;PersonalData.Personal`"
       ]
     },
+    {
+      "name": "glossaryTerms",
+      "required": false,
+      "description": "Fully qualified glossary term names associated with the database schema separated by ';'. Tags derived from the glossary term are automatically applied to the database schema.",
+      "examples": [
+        "`Glossary.GlossaryTerm1`",
+        "`Glossary.GlossaryTerm1.GlossaryTerm2`"
+      ]
+    },
+    {
+      "name": "tiers",
+      "required": false,
+      "description": "Fully qualified tier tags names associated with the table separated by ';'.",
+      "examples": [
+        "`Tier.Tier1`",
+        "`Tier.Tier2`"
+      ]
+    },
     {
       "name": "retentionPeriod",
       "required": false,
| @ -0,0 +1,73 @@ | |||||||
|  | { | ||||||
|  |   "summary": "Database Service CSV file is used for importing and exporting service metadata from and to an **existing** database service.", | ||||||
|  |   "headers": [ | ||||||
|  |     { | ||||||
|  |       "name": "name", | ||||||
|  |       "required": true, | ||||||
|  |       "description": "The name of the database schema being updated.", | ||||||
|  |       "examples": [ | ||||||
|  |         "`users`, `customers`" | ||||||
|  |       ] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "name": "displayName", | ||||||
|  |       "required": false, | ||||||
|  |       "description": "Display name for the table.", | ||||||
|  |       "examples": [ | ||||||
|  |         "`User Schema`, `Customer Schema`" | ||||||
|  |       ] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "name": "description", | ||||||
|  |       "required": false, | ||||||
|  |       "description": "Description for the database schema in Markdown format.", | ||||||
|  |       "examples": [ | ||||||
|  |         "`Customer Schema` that contains all the tables related to customer entity." | ||||||
|  |       ] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "name": "owner", | ||||||
|  |       "required": false, | ||||||
|  |       "description": "Owner names separated by ';'. For team owner, include prefix team. For user owner, include prefix user.", | ||||||
|  |       "examples": [ | ||||||
|  |         "`team;marketing`", | ||||||
|  |         "`user;john`" | ||||||
|  |       ] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "name": "tags", | ||||||
|  |       "required": false, | ||||||
|  |       "description": "Fully qualified classification tag names associated with the database schema separated by ';'.. These tags are automatically applied along with the glossary term, when it is used to label an entity.", | ||||||
|  |       "examples": [ | ||||||
|  |         "`PII.Sensitive`", | ||||||
|  |         "`PII.Sensitive;PersonalData.Personal`" | ||||||
|  |       ] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "name": "glossaryTerms", | ||||||
|  |       "required": false, | ||||||
|  |       "description": "Fully qualified glossary term names associated with the database schema separated by ';'. Tags derived from the glossary term are automatically applied to the database schema.", | ||||||
|  |       "examples": [ | ||||||
|  |         "`Glossary.GlossaryTerm1`", | ||||||
|  |         "`Glossary.GlossaryTerm1.GlossaryTerm2`" | ||||||
|  |       ] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "name": "tiers", | ||||||
|  |       "required": false, | ||||||
|  |       "description": "Fully qualified tier tags names associated with the table separated by ';'.", | ||||||
|  |       "examples": [ | ||||||
|  |         "`Tier.Tier1`", | ||||||
|  |         "`Tier.Tier2`" | ||||||
|  |       ] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "name": "domain", | ||||||
|  |       "required": false, | ||||||
|  |       "description": "Domain to which the database schema belongs to", | ||||||
|  |       "examples": [ | ||||||
|  |         "Marketing", "Sales" | ||||||
|  |       ] | ||||||
|  |     } | ||||||
|  |   ] | ||||||
|  | } | ||||||
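The new database-service CSV follows the same shape without retentionPeriod and sourceUrl. A hedged sketch, assuming the header order listed above (name, displayName, description, owner, tags, glossaryTerms, tiers, domain) with illustrative values only; values containing ',' or '"' must be CSV-quoted, as the tests below do via escapeCsv:

    public class DatabaseServiceCsvRowExample {
      public static void main(String[] args) {
        // The description contains a comma, so it is wrapped in CSV quotes.
        String description = "\"Databases for sales, billing and support\"";
        String row = String.join(",",
            "sales_db",                            // name
            "Sales DB",                            // displayName
            description,                           // description (quoted)
            "team;marketing",                      // owner
            "PII.Sensitive;PersonalData.Personal", // tags, multiple values separated by ';'
            "Glossary.GlossaryTerm1",              // glossaryTerms
            "Tier.Tier2",                          // tiers
            "Sales");                              // domain
        System.out.println(row);
      }
    }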
| @ -43,6 +43,24 @@ | |||||||
|         "`PII.Sensitive;PersonalData.Personal`" |         "`PII.Sensitive;PersonalData.Personal`" | ||||||
|       ] |       ] | ||||||
|     }, |     }, | ||||||
|  |     { | ||||||
|  |       "name": "glossaryTerms", | ||||||
|  |       "required": false, | ||||||
|  |       "description": "Fully qualified glossary term names associated with the table separated by ';'.. These tags are automatically applied along with the glossary term, when it is used to label an entity.", | ||||||
|  |       "examples": [ | ||||||
|  |         "`Glossary.GlossaryTerm1`", | ||||||
|  |         "`Glossary.GlossaryTerm1.GlossaryTerm2`" | ||||||
|  |       ] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "name": "tiers", | ||||||
|  |       "required": false, | ||||||
|  |       "description": "Fully qualified tier tags names associated with the table separated by ';'.", | ||||||
|  |       "examples": [ | ||||||
|  |         "`Tier.Tier1`", | ||||||
|  |         "`Tier.Tier2`" | ||||||
|  |       ] | ||||||
|  |     }, | ||||||
|     { |     { | ||||||
|       "name": "retentionPeriod", |       "name": "retentionPeriod", | ||||||
|       "required": false, |       "required": false, | ||||||
| @ -99,6 +117,31 @@ | |||||||
|         "array<int>", "map<int, string>" |         "array<int>", "map<int, string>" | ||||||
|       ] |       ] | ||||||
|     }, |     }, | ||||||
|  |     { | ||||||
|  |       "name": "column.dataType", | ||||||
|  |       "required": false, | ||||||
|  |       "description": "Actual Column data type.", | ||||||
|  |       "examples": [ | ||||||
|  |         "BLOB", "DATE" | ||||||
|  |       ] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "name": "column.arrayDataType", | ||||||
|  |       "required": false, | ||||||
|  |       "description": "In case of data Type being Array, type of Array Data.", | ||||||
|  |       "examples": [ | ||||||
|  |         "BLOB", "DATE" | ||||||
|  |       ] | ||||||
|  |     }, | ||||||
|  |      | ||||||
|  |     { | ||||||
|  |       "name": "column.dataLength", | ||||||
|  |       "required": false, | ||||||
|  |       "description": "Data Length of Column in case of CHAR, VARCHAR, BINARY etc.", | ||||||
|  |       "examples": [ | ||||||
|  |         "36" | ||||||
|  |       ] | ||||||
|  |     }, | ||||||
|     { |     { | ||||||
|       "name": "column.tags", |       "name": "column.tags", | ||||||
|       "required": false, |       "required": false, | ||||||
| @ -107,6 +150,15 @@ | |||||||
|         "`PII.Sensitive`", |         "`PII.Sensitive`", | ||||||
|         "`PII.Sensitive;PersonalData.Personal`" |         "`PII.Sensitive;PersonalData.Personal`" | ||||||
|       ] |       ] | ||||||
|  |     }, | ||||||
|  |     { | ||||||
|  |       "name": "column.glossaryTerms", | ||||||
|  |       "required": false, | ||||||
|  |       "description": "Fully qualified glossary term names associated with the column separated by ';'.. Tags automatically derived along with some glossaryTerm will be in `tags`.", | ||||||
|  |       "examples": [ | ||||||
|  |         "`Glossary.GlossaryTerm1`", | ||||||
|  |         "`Glossary.GlossaryTerm1.GlossaryTerm2`" | ||||||
|  |       ] | ||||||
|     } |     } | ||||||
|   ] |   ] | ||||||
| } | } | ||||||
| @ -25,6 +25,7 @@ import static org.openmetadata.csv.EntityCsvTest.assertRows; | |||||||
| import static org.openmetadata.csv.EntityCsvTest.assertSummary; | import static org.openmetadata.csv.EntityCsvTest.assertSummary; | ||||||
| import static org.openmetadata.csv.EntityCsvTest.createCsv; | import static org.openmetadata.csv.EntityCsvTest.createCsv; | ||||||
| import static org.openmetadata.csv.EntityCsvTest.getFailedRecord; | import static org.openmetadata.csv.EntityCsvTest.getFailedRecord; | ||||||
|  | import static org.openmetadata.csv.EntityCsvTest.getSuccessRecord; | ||||||
| import static org.openmetadata.service.util.EntityUtil.getFqn; | import static org.openmetadata.service.util.EntityUtil.getFqn; | ||||||
| import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS; | import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS; | ||||||
| import static org.openmetadata.service.util.TestUtils.assertListNotEmpty; | import static org.openmetadata.service.util.TestUtils.assertListNotEmpty; | ||||||
| @ -44,6 +45,7 @@ import org.openmetadata.csv.EntityCsv; | |||||||
| import org.openmetadata.schema.api.data.CreateDatabase; | import org.openmetadata.schema.api.data.CreateDatabase; | ||||||
| import org.openmetadata.schema.api.data.CreateDatabaseSchema; | import org.openmetadata.schema.api.data.CreateDatabaseSchema; | ||||||
| import org.openmetadata.schema.entity.data.Database; | import org.openmetadata.schema.entity.data.Database; | ||||||
|  | import org.openmetadata.schema.entity.data.DatabaseSchema; | ||||||
| import org.openmetadata.schema.type.ApiStatus; | import org.openmetadata.schema.type.ApiStatus; | ||||||
| import org.openmetadata.schema.type.EntityReference; | import org.openmetadata.schema.type.EntityReference; | ||||||
| import org.openmetadata.schema.type.csv.CsvImportResult; | import org.openmetadata.schema.type.csv.CsvImportResult; | ||||||
| @ -117,7 +119,7 @@ public class DatabaseResourceTest extends EntityResourceTest<Database, CreateDat | |||||||
|     // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain |     // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain | ||||||
|     // Update databaseSchema with invalid tags field |     // Update databaseSchema with invalid tags field | ||||||
|     String resultsHeader = recordToString(EntityCsv.getResultHeaders(DatabaseCsv.HEADERS)); |     String resultsHeader = recordToString(EntityCsv.getResultHeaders(DatabaseCsv.HEADERS)); | ||||||
|     String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,"; |     String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,,"; | ||||||
|     String csv = createCsv(DatabaseCsv.HEADERS, listOf(record), null); |     String csv = createCsv(DatabaseCsv.HEADERS, listOf(record), null); | ||||||
|     CsvImportResult result = importCsv(databaseName, csv, false); |     CsvImportResult result = importCsv(databaseName, csv, false); | ||||||
|     assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); |     assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); | ||||||
| @ -127,18 +129,27 @@ public class DatabaseResourceTest extends EntityResourceTest<Database, CreateDat | |||||||
|         }; |         }; | ||||||
|     assertRows(result, expectedRows); |     assertRows(result, expectedRows); | ||||||
| 
 | 
 | ||||||
|     // Existing schema can be updated. New schema can't be created. |     // An invalid tag still causes the row to fail. | ||||||
|     record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,"; |     record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,,,"; | ||||||
|     csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null); |     csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null); | ||||||
|     result = importCsv(databaseName, csv, false); |     result = importCsv(databaseName, csv, false); | ||||||
|     assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); |     assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); | ||||||
|     String schemaFqn = FullyQualifiedName.add(database.getFullyQualifiedName(), "non-existing"); |  | ||||||
|     expectedRows = |     expectedRows = | ||||||
|         new String[] { |         new String[] { | ||||||
|           resultsHeader, |           resultsHeader, getFailedRecord(record, entityNotFound(4, "tag", "Tag.invalidTag")) | ||||||
|           getFailedRecord(record, entityNotFound(0, Entity.DATABASE_SCHEMA, schemaFqn)) |  | ||||||
|         }; |         }; | ||||||
|     assertRows(result, expectedRows); |     assertRows(result, expectedRows); | ||||||
|  | 
 | ||||||
|  |     // databaseSchema will be created if it does not exist | ||||||
|  |     String schemaFqn = FullyQualifiedName.add(database.getFullyQualifiedName(), "non-existing"); | ||||||
|  |     record = "non-existing,dsp1,dsc1,,,,,,,"; | ||||||
|  |     csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null); | ||||||
|  |     result = importCsv(databaseName, csv, false); | ||||||
|  |     assertSummary(result, ApiStatus.SUCCESS, 2, 2, 0); | ||||||
|  |     expectedRows = new String[] {resultsHeader, getSuccessRecord(record, "Entity created")}; | ||||||
|  |     assertRows(result, expectedRows); | ||||||
|  |     DatabaseSchema createdSchema = schemaTest.getEntityByName(schemaFqn, "id", ADMIN_AUTH_HEADERS); | ||||||
|  |     assertEquals(schemaFqn, createdSchema.getFullyQualifiedName()); | ||||||
|   } |   } | ||||||
| 
 | 
 | ||||||
|   @Test |   @Test | ||||||
| @ -150,11 +161,12 @@ public class DatabaseResourceTest extends EntityResourceTest<Database, CreateDat | |||||||
|         schemaTest.createRequest("s1").withDatabase(database.getFullyQualifiedName()); |         schemaTest.createRequest("s1").withDatabase(database.getFullyQualifiedName()); | ||||||
|     schemaTest.createEntity(createSchema, ADMIN_AUTH_HEADERS); |     schemaTest.createEntity(createSchema, ADMIN_AUTH_HEADERS); | ||||||
| 
 | 
 | ||||||
|     // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain |     // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, retentionPeriod, | ||||||
|  |     // sourceUrl, domain | ||||||
|     // Update terms with change in description |     // Update terms with change in description | ||||||
|     String record = |     String record = | ||||||
|         String.format( |         String.format( | ||||||
|             "s1,dsp1,new-dsc1,user;%s,Tier.Tier1,P23DT23H,http://test.com,%s", |             "s1,dsp1,new-dsc1,user;%s,,,Tier.Tier1,P23DT23H,http://test.com,%s", | ||||||
|             user1, escapeCsv(DOMAIN.getFullyQualifiedName())); |             user1, escapeCsv(DOMAIN.getFullyQualifiedName())); | ||||||
| 
 | 
 | ||||||
|     // Update created entity with changes |     // Update created entity with changes | ||||||
|  | |||||||
| @ -25,6 +25,7 @@ import static org.openmetadata.csv.EntityCsvTest.assertRows; | |||||||
| import static org.openmetadata.csv.EntityCsvTest.assertSummary; | import static org.openmetadata.csv.EntityCsvTest.assertSummary; | ||||||
| import static org.openmetadata.csv.EntityCsvTest.createCsv; | import static org.openmetadata.csv.EntityCsvTest.createCsv; | ||||||
| import static org.openmetadata.csv.EntityCsvTest.getFailedRecord; | import static org.openmetadata.csv.EntityCsvTest.getFailedRecord; | ||||||
|  | import static org.openmetadata.csv.EntityCsvTest.getSuccessRecord; | ||||||
| import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS; | import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS; | ||||||
| import static org.openmetadata.service.util.TestUtils.assertListNotNull; | import static org.openmetadata.service.util.TestUtils.assertListNotNull; | ||||||
| import static org.openmetadata.service.util.TestUtils.assertListNull; | import static org.openmetadata.service.util.TestUtils.assertListNull; | ||||||
| @ -118,7 +119,7 @@ class DatabaseSchemaResourceTest extends EntityResourceTest<DatabaseSchema, Crea | |||||||
|     // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain |     // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain | ||||||
|     // Create table with invalid tags field |     // Create table with invalid tags field | ||||||
|     String resultsHeader = recordToString(EntityCsv.getResultHeaders(DatabaseSchemaCsv.HEADERS)); |     String resultsHeader = recordToString(EntityCsv.getResultHeaders(DatabaseSchemaCsv.HEADERS)); | ||||||
|     String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,"; |     String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,,"; | ||||||
|     String csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null); |     String csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null); | ||||||
|     CsvImportResult result = importCsv(schemaName, csv, false); |     CsvImportResult result = importCsv(schemaName, csv, false); | ||||||
|     assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); |     assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); | ||||||
| @ -128,17 +129,27 @@ class DatabaseSchemaResourceTest extends EntityResourceTest<DatabaseSchema, Crea | |||||||
|         }; |         }; | ||||||
|     assertRows(result, expectedRows); |     assertRows(result, expectedRows); | ||||||
| 
 | 
 | ||||||
|     // Existing table can be updated. New table can't be created. |     // An invalid tag causes the row to fail. | ||||||
|     record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,"; |     record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,,,"; | ||||||
|     csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null); |     csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null); | ||||||
|     result = importCsv(schemaName, csv, false); |     result = importCsv(schemaName, csv, false); | ||||||
|     assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); |     assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); | ||||||
|     String tableFqn = FullyQualifiedName.add(schema.getFullyQualifiedName(), "non-existing"); |  | ||||||
|     expectedRows = |     expectedRows = | ||||||
|         new String[] { |         new String[] { | ||||||
|           resultsHeader, getFailedRecord(record, entityNotFound(0, Entity.TABLE, tableFqn)) |           resultsHeader, getFailedRecord(record, entityNotFound(4, "tag", "Tag.invalidTag")) | ||||||
|         }; |         }; | ||||||
|     assertRows(result, expectedRows); |     assertRows(result, expectedRows); | ||||||
|  | 
 | ||||||
|  |     // A table that does not exist yet is created on import | ||||||
|  |     record = "non-existing,dsp1,dsc1,,,,,,,"; | ||||||
|  |     String tableFqn = FullyQualifiedName.add(schema.getFullyQualifiedName(), "non-existing"); | ||||||
|  |     csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null); | ||||||
|  |     result = importCsv(schemaName, csv, false); | ||||||
|  |     assertSummary(result, ApiStatus.SUCCESS, 2, 2, 0); | ||||||
|  |     expectedRows = new String[] {resultsHeader, getSuccessRecord(record, "Entity created")}; | ||||||
|  |     assertRows(result, expectedRows); | ||||||
|  |     Table table = tableTest.getEntityByName(tableFqn, "id", ADMIN_AUTH_HEADERS); | ||||||
|  |     assertEquals(tableFqn, table.getFullyQualifiedName()); | ||||||
|   } |   } | ||||||
| 
 | 
 | ||||||
|   @Test |   @Test | ||||||
| @ -155,7 +166,7 @@ class DatabaseSchemaResourceTest extends EntityResourceTest<DatabaseSchema, Crea | |||||||
|     List<String> updateRecords = |     List<String> updateRecords = | ||||||
|         listOf( |         listOf( | ||||||
|             String.format( |             String.format( | ||||||
|                 "s1,dsp1,new-dsc1,user;%s,Tier.Tier1,P23DT23H,http://test.com,%s", |                 "s1,dsp1,new-dsc1,user;%s,,,Tier.Tier1,P23DT23H,http://test.com,%s", | ||||||
|                 user1, escapeCsv(DOMAIN.getFullyQualifiedName()))); |                 user1, escapeCsv(DOMAIN.getFullyQualifiedName()))); | ||||||
| 
 | 
 | ||||||
|     // Update created entity with changes |     // Update created entity with changes | ||||||
|  | |||||||
| @ -32,6 +32,7 @@ import static org.openmetadata.csv.EntityCsvTest.assertRows; | |||||||
| import static org.openmetadata.csv.EntityCsvTest.assertSummary; | import static org.openmetadata.csv.EntityCsvTest.assertSummary; | ||||||
| import static org.openmetadata.csv.EntityCsvTest.createCsv; | import static org.openmetadata.csv.EntityCsvTest.createCsv; | ||||||
| import static org.openmetadata.csv.EntityCsvTest.getFailedRecord; | import static org.openmetadata.csv.EntityCsvTest.getFailedRecord; | ||||||
|  | import static org.openmetadata.csv.EntityCsvTest.getSuccessRecord; | ||||||
| import static org.openmetadata.schema.type.ColumnDataType.ARRAY; | import static org.openmetadata.schema.type.ColumnDataType.ARRAY; | ||||||
| import static org.openmetadata.schema.type.ColumnDataType.BIGINT; | import static org.openmetadata.schema.type.ColumnDataType.BIGINT; | ||||||
| import static org.openmetadata.schema.type.ColumnDataType.BINARY; | import static org.openmetadata.schema.type.ColumnDataType.BINARY; | ||||||
| @ -2294,7 +2295,7 @@ public class TableResourceTest extends EntityResourceTest<Table, CreateTable> { | |||||||
|     // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain |     // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain | ||||||
|     // Create table with invalid tags field |     // Create table with invalid tags field | ||||||
|     String resultsHeader = recordToString(EntityCsv.getResultHeaders(TableCsv.HEADERS)); |     String resultsHeader = recordToString(EntityCsv.getResultHeaders(TableCsv.HEADERS)); | ||||||
|     String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,c1,c1,c1,INT,"; |     String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,,,,c1,c1,c1,,INT,,,,"; | ||||||
|     String csv = createCsv(TableCsv.HEADERS, listOf(record), null); |     String csv = createCsv(TableCsv.HEADERS, listOf(record), null); | ||||||
|     CsvImportResult result = importCsv(tableName, csv, false); |     CsvImportResult result = importCsv(tableName, csv, false); | ||||||
|     assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); |     assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); | ||||||
| @ -2306,26 +2307,23 @@ public class TableResourceTest extends EntityResourceTest<Table, CreateTable> { | |||||||
|     assertRows(result, expectedRows); |     assertRows(result, expectedRows); | ||||||
| 
 | 
 | ||||||
|     // Add an invalid column tag |     // Add an invalid column tag | ||||||
|     record = "s1,dsp1,dsc1,,,,,,c1,,,,Tag.invalidTag"; |     record = "s1,dsp1,dsc1,,,,,,,,c1,,,,INT,,,Tag.invalidTag,"; | ||||||
|     csv = createCsv(TableCsv.HEADERS, listOf(record), null); |     csv = createCsv(TableCsv.HEADERS, listOf(record), null); | ||||||
|     result = importCsv(tableName, csv, false); |     result = importCsv(tableName, csv, false); | ||||||
|     assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); |     assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); | ||||||
|     expectedRows = |     expectedRows = | ||||||
|         new String[] { |         new String[] { | ||||||
|           resultsHeader, |           resultsHeader, | ||||||
|           getFailedRecord(record, EntityCsv.entityNotFound(12, "tag", "Tag.invalidTag")) |           getFailedRecord(record, EntityCsv.entityNotFound(17, "tag", "Tag.invalidTag")) | ||||||
|         }; |         }; | ||||||
|     assertRows(result, expectedRows); |     assertRows(result, expectedRows); | ||||||
| 
 | 
 | ||||||
|     // Update a non existing column |     // Updating a non-existing column creates a new column named "nonExistingColumn" | ||||||
|     record = "s1,dsp1,dsc1,,,,,,nonExistingColumn,,,,"; |     record = "s1,dsp1,dsc1,,,,,,,,nonExistingColumn,,,,INT,,,,"; | ||||||
|     csv = createCsv(TableCsv.HEADERS, listOf(record), null); |     csv = createCsv(TableCsv.HEADERS, listOf(record), null); | ||||||
|     result = importCsv(tableName, csv, false); |     result = importCsv(tableName, csv, false); | ||||||
|     assertSummary(result, ApiStatus.FAILURE, 2, 1, 1); |     assertSummary(result, ApiStatus.SUCCESS, 2, 2, 0); | ||||||
|     expectedRows = |     expectedRows = new String[] {resultsHeader, getSuccessRecord(record, "Entity updated")}; | ||||||
|         new String[] { |  | ||||||
|           resultsHeader, getFailedRecord(record, EntityCsv.columnNotFound(8, "nonExistingColumn")) |  | ||||||
|         }; |  | ||||||
|     assertRows(result, expectedRows); |     assertRows(result, expectedRows); | ||||||
|   } |   } | ||||||
| 
 | 
 | ||||||
| @ -2341,17 +2339,18 @@ public class TableResourceTest extends EntityResourceTest<Table, CreateTable> { | |||||||
|         createRequest("s1").withColumns(listOf(c1, c2, c3)).withTableConstraints(null); |         createRequest("s1").withColumns(listOf(c1, c2, c3)).withTableConstraints(null); | ||||||
|     Table table = createEntity(createTable, ADMIN_AUTH_HEADERS); |     Table table = createEntity(createTable, ADMIN_AUTH_HEADERS); | ||||||
| 
 | 
 | ||||||
|     // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain |     // Headers: name, displayName, description, owner, tags, glossaryTerms, tiers, retentionPeriod, | ||||||
|  |     // sourceUrl, domain | ||||||
|     // Update terms with change in description |     // Update terms with change in description | ||||||
|     List<String> updateRecords = |     List<String> updateRecords = | ||||||
|         listOf( |         listOf( | ||||||
|             String.format( |             String.format( | ||||||
|                 "s1,dsp1,new-dsc1,user;%s,Tier.Tier1,P23DT23H,http://test.com,%s,c1," |                 "s1,dsp1,new-dsc1,user;%s,,,Tier.Tier1,P23DT23H,http://test.com,%s,c1," | ||||||
|                     + "dsp1-new,desc1,type,PII.Sensitive", |                     + "dsp1-new,desc1,type,STRUCT,,,PII.Sensitive,", | ||||||
|                 user1, escapeCsv(DOMAIN.getFullyQualifiedName())), |                 user1, escapeCsv(DOMAIN.getFullyQualifiedName())), | ||||||
|             ",,,,,,,,c1.c11,dsp11-new,desc11,type1,PII.Sensitive", |             ",,,,,,,,,,c1.c11,dsp11-new,desc11,type1,INT,,,PII.Sensitive,", | ||||||
|             ",,,,,,,,c2,,,,", |             ",,,,,,,,,,c2,,,type1,INT,,,,", | ||||||
|             ",,,,,,,,c3,,,,"); |             ",,,,,,,,,,c3,,,type1,INT,,,,"); | ||||||
| 
 | 
 | ||||||
|     // Update created entity with changes |     // Update created entity with changes | ||||||
|     importCsvAndValidate(table.getFullyQualifiedName(), TableCsv.HEADERS, null, updateRecords); |     importCsvAndValidate(table.getFullyQualifiedName(), TableCsv.HEADERS, null, updateRecords); | ||||||
|  | |||||||
| @ -413,10 +413,10 @@ public class GlossaryResourceTest extends EntityResourceTest<Glossary, CreateGlo | |||||||
|     List<String> createRecords = |     List<String> createRecords = | ||||||
|         listOf( |         listOf( | ||||||
|             String.format( |             String.format( | ||||||
|                 ",g1,dsp1,\"dsc1,1\",h1;h2;h3,,term1;http://term1,Tier.Tier1,%s;%s,user;%s,%s", |                 ",g1,dsp1,\"dsc1,1\",h1;h2;h3,,term1;http://term1,PII.None,%s;%s,user;%s,%s", | ||||||
|                 user1, user2, user1, "Approved"), |                 user1, user2, user1, "Approved"), | ||||||
|             String.format( |             String.format( | ||||||
|                 ",g2,dsp2,dsc3,h1;h3;h3,,term2;https://term2,Tier.Tier2,%s,user;%s,%s", |                 ",g2,dsp2,dsc3,h1;h3;h3,,term2;https://term2,PII.NonSensitive,%s,user;%s,%s", | ||||||
|                 user1, user2, "Approved"), |                 user1, user2, "Approved"), | ||||||
|             String.format( |             String.format( | ||||||
|                 "importExportTest.g1,g11,dsp2,dsc11,h1;h3;h3,,,,%s,team;%s,%s", |                 "importExportTest.g1,g11,dsp2,dsc11,h1;h3;h3,,,,%s,team;%s,%s", | ||||||
| @ -426,10 +426,10 @@ public class GlossaryResourceTest extends EntityResourceTest<Glossary, CreateGlo | |||||||
|     List<String> updateRecords = |     List<String> updateRecords = | ||||||
|         listOf( |         listOf( | ||||||
|             String.format( |             String.format( | ||||||
|                 ",g1,dsp1,new-dsc1,h1;h2;h3,,term1;http://term1,Tier.Tier1,%s;%s,user;%s,%s", |                 ",g1,dsp1,new-dsc1,h1;h2;h3,,term1;http://term1,PII.None,%s;%s,user;%s,%s", | ||||||
|                 user1, user2, user1, "Approved"), |                 user1, user2, user1, "Approved"), | ||||||
|             String.format( |             String.format( | ||||||
|                 ",g2,dsp2,new-dsc3,h1;h3;h3,,term2;https://term2,Tier.Tier2,%s,user;%s,%s", |                 ",g2,dsp2,new-dsc3,h1;h3;h3,,term2;https://term2,PII.NonSensitive,%s,user;%s,%s", | ||||||
|                 user1, user2, "Approved"), |                 user1, user2, "Approved"), | ||||||
|             String.format( |             String.format( | ||||||
|                 "importExportTest.g1,g11,dsp2,new-dsc11,h1;h3;h3,,,,%s,team;%s,%s", |                 "importExportTest.g1,g11,dsp2,new-dsc11,h1;h3;h3,,,,%s,team;%s,%s", | ||||||
| @ -437,7 +437,7 @@ public class GlossaryResourceTest extends EntityResourceTest<Glossary, CreateGlo | |||||||
| 
 | 
 | ||||||
|     // Add new row to existing rows |     // Add new row to existing rows | ||||||
|     List<String> newRecords = |     List<String> newRecords = | ||||||
|         listOf(",g3,dsp0,dsc0,h1;h2;h3,,term0;http://term0,Tier.Tier3,,,Approved"); |         listOf(",g3,dsp0,dsc0,h1;h2;h3,,term0;http://term0,PII.Sensitive,,,Approved"); | ||||||
|     testImportExport( |     testImportExport( | ||||||
|         glossary.getName(), GlossaryCsv.HEADERS, createRecords, updateRecords, newRecords); |         glossary.getName(), GlossaryCsv.HEADERS, createRecords, updateRecords, newRecords); | ||||||
|   } |   } | ||||||
|  | |||||||