14493 Part 1 - Support for database schema, table import export (#14494)

* Fixes #14493 - Import & export support for database

* Import export support for database schema
This commit is contained in:
Suresh Srinivas 2023-12-25 13:09:07 -08:00 committed by GitHub
parent 496ae26cef
commit e98ae8aa2a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 648 additions and 25 deletions

View File

@ -153,17 +153,19 @@ public abstract class EntityCsv<T extends EntityInterface> {
/** Owner field is in entityType;entityName format */
public EntityReference getOwner(CSVPrinter printer, CSVRecord csvRecord, int fieldNumber)
throws IOException {
String owner = csvRecord.get(fieldNumber);
if (nullOrEmpty(owner)) {
String ownerField = csvRecord.get(fieldNumber);
if (nullOrEmpty(ownerField)) {
return null;
}
List<String> list = CsvUtil.fieldToStrings(owner);
List<String> list = CsvUtil.fieldToStrings(ownerField);
if (list.size() != 2) {
importFailure(printer, invalidOwner(fieldNumber), csvRecord);
return null;
}
return getEntityReference(printer, csvRecord, fieldNumber, list.get(0), list.get(1));
EntityReference owner =
getEntityReference(printer, csvRecord, fieldNumber, list.get(0), list.get(1));
return owner == null || Boolean.TRUE.equals(owner.getInherited()) ? null : owner;
}
/** Owner field is in entityName format */
@ -218,7 +220,7 @@ public abstract class EntityCsv<T extends EntityInterface> {
}
EntityInterface entity = getEntityByName(entityType, fqn);
if (entity == null) {
importFailure(printer, entityNotFound(fieldNumber, fqn), csvRecord);
importFailure(printer, entityNotFound(fieldNumber, entityType, fqn), csvRecord);
processRecord = false;
return null;
}
@ -283,7 +285,7 @@ public abstract class EntityCsv<T extends EntityInterface> {
private Iterator<CSVRecord> parse(String csv) {
Reader in = new StringReader(csv);
try {
return CSVFormat.DEFAULT.parse(in).iterator();
return CSVFormat.DEFAULT.withEscape('\\').parse(in).iterator();
} catch (IOException e) {
documentFailure(failed(e.getMessage(), CsvErrorType.PARSER_FAILURE));
}
@ -326,11 +328,9 @@ public abstract class EntityCsv<T extends EntityInterface> {
return;
}
// Finally, convert record into entity for importing
T entity = toEntity(resultsPrinter, csvRecord);
T entity = toEntity(resultsPrinter, csvRecord); // Convert record into entity for importing
if (entity != null) {
// Finally, create entities
createEntity(resultsPrinter, csvRecord, entity);
createEntity(resultsPrinter, csvRecord, entity); // Finally, create entities
}
}
@ -398,8 +398,8 @@ public abstract class EntityCsv<T extends EntityInterface> {
return String.format(FIELD_ERROR_MSG, CsvErrorType.INVALID_FIELD, field + 1, error);
}
public static String entityNotFound(int field, String fqn) {
String error = String.format("Entity %s not found", fqn);
public static String entityNotFound(int field, String entityType, String fqn) {
String error = String.format("Entity %s of type %s not found", fqn, entityType);
return String.format(FIELD_ERROR_MSG, CsvErrorType.INVALID_FIELD, field + 1, error);
}

View File

@ -13,17 +13,32 @@
package org.openmetadata.service.jdbi3;
import static org.openmetadata.csv.CsvUtil.addField;
import static org.openmetadata.csv.CsvUtil.addOwner;
import static org.openmetadata.csv.CsvUtil.addTagLabels;
import static org.openmetadata.service.Entity.DATABASE_SCHEMA;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.UUID;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;
import org.jdbi.v3.sqlobject.transaction.Transaction;
import org.openmetadata.csv.EntityCsv;
import org.openmetadata.schema.EntityInterface;
import org.openmetadata.schema.entity.data.Database;
import org.openmetadata.schema.entity.data.DatabaseSchema;
import org.openmetadata.schema.entity.services.DatabaseService;
import org.openmetadata.schema.type.DatabaseProfilerConfig;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.Relationship;
import org.openmetadata.schema.type.csv.CsvDocumentation;
import org.openmetadata.schema.type.csv.CsvHeader;
import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.service.Entity;
import org.openmetadata.service.resources.databases.DatabaseResource;
import org.openmetadata.service.util.EntityUtil;
@ -85,6 +100,26 @@ public class DatabaseRepository extends EntityRepository<Database> {
return Entity.getEntity(entity.getService(), fields, Include.ALL);
}
/** Exports all non-deleted schemas of the named database as CSV, one row per schema, sorted by FQN for stable output. */
@Override
public String exportToCsv(String name, String user) throws IOException {
Database database = getByName(null, name, Fields.EMPTY_FIELDS); // Validate database name
DatabaseSchemaRepository repository =
(DatabaseSchemaRepository) Entity.getEntityRepository(DATABASE_SCHEMA);
// Only currently-existing schemas are exported; soft-deleted ones are excluded
ListFilter filter = new ListFilter(Include.NON_DELETED).addQueryParam("database", name);
List<DatabaseSchema> schemas =
repository.listAll(repository.getFields("owner,tags,domain"), filter);
// Deterministic row order so repeated exports diff cleanly
schemas.sort(Comparator.comparing(EntityInterface::getFullyQualifiedName));
return new DatabaseCsv(database, user).exportCsv(schemas);
}
/** Imports/updates schemas of the named database from CSV; dryRun validates the CSV without persisting changes. */
@Override
public CsvImportResult importFromCsv(String name, String csv, boolean dryRun, String user)
throws IOException {
Database database = getByName(null, name, Fields.EMPTY_FIELDS); // Validate database name
DatabaseCsv databaseCsv = new DatabaseCsv(database, user);
return databaseCsv.importCsv(csv, dryRun);
}
public void setFields(Database database, Fields fields) {
database.setService(getContainer(database.getId()));
database.setSourceHash(fields.contains("sourceHash") ? database.getSourceHash() : null);
@ -180,4 +215,72 @@ public class DatabaseRepository extends EntityRepository<Database> {
recordChange("sourceHash", original.getSourceHash(), updated.getSourceHash());
}
}
/** CSV import/export support for a database: each CSV row represents one database schema. */
public static class DatabaseCsv extends EntityCsv<DatabaseSchema> {
public static final CsvDocumentation DOCUMENTATION = getCsvDocumentation(Entity.DATABASE);
public static final List<CsvHeader> HEADERS = DOCUMENTATION.getHeaders();
// Parent database whose schemas are imported/exported
private final Database database;
DatabaseCsv(Database database, String user) {
super(DATABASE_SCHEMA, DOCUMENTATION.getHeaders(), user);
this.database = database;
}
/**
 * Converts a CSV record into the existing schema it updates. Import never creates new
 * schemas: if the named schema does not exist under this database, the record is marked
 * failed and null is returned (processRecord is cleared).
 */
@Override
protected DatabaseSchema toEntity(CSVPrinter printer, CSVRecord csvRecord) throws IOException {
String schemaFqn = FullyQualifiedName.add(database.getFullyQualifiedName(), csvRecord.get(0));
DatabaseSchema schema;
try {
schema = Entity.getEntityByName(DATABASE_SCHEMA, schemaFqn, "*", Include.NON_DELETED);
} catch (Exception ex) {
importFailure(printer, entityNotFound(0, DATABASE_SCHEMA, schemaFqn), csvRecord);
processRecord = false;
return null;
}
// Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
// Fields 0,1,2,5,6 - name, displayName, description, retentionPeriod, sourceUrl
schema
.withName(csvRecord.get(0))
.withDisplayName(csvRecord.get(1))
.withDescription(csvRecord.get(2))
.withRetentionPeriod(csvRecord.get(5))
.withSourceUrl(csvRecord.get(6));
// Field 3 - owner ("entityType;entityName" format)
schema.withOwner(getOwner(printer, csvRecord, 3));
// Field 4 - tags (fully qualified tag names separated by ';')
schema.withTags(getTagLabels(printer, csvRecord, 4));
if (!processRecord) {
return null;
}
// Field 7 - domain
schema.withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN));
if (!processRecord) {
return null;
}
return schema;
}
/** Serializes one schema into a CSV row matching HEADERS order. */
@Override
protected List<String> toRecord(DatabaseSchema entity) {
// Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
List<String> recordList = new ArrayList<>();
addField(recordList, entity.getName());
addField(recordList, entity.getDisplayName());
addField(recordList, entity.getDescription());
addOwner(recordList, entity.getOwner());
addTagLabels(recordList, entity.getTags());
addField(recordList, entity.getRetentionPeriod());
addField(recordList, entity.getSourceUrl());
// Inherited domains are not exported — only a domain set directly on the schema
String domain =
entity.getDomain() == null || Boolean.TRUE.equals(entity.getDomain().getInherited())
? ""
: entity.getDomain().getFullyQualifiedName();
addField(recordList, domain);
return recordList;
}
}
}

View File

@ -13,20 +13,35 @@
package org.openmetadata.service.jdbi3;
import static org.openmetadata.csv.CsvUtil.addField;
import static org.openmetadata.csv.CsvUtil.addOwner;
import static org.openmetadata.csv.CsvUtil.addTagLabels;
import static org.openmetadata.schema.type.Include.ALL;
import static org.openmetadata.service.Entity.DATABASE_SCHEMA;
import static org.openmetadata.service.Entity.TABLE;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.UUID;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;
import org.jdbi.v3.sqlobject.transaction.Transaction;
import org.openmetadata.csv.EntityCsv;
import org.openmetadata.schema.EntityInterface;
import org.openmetadata.schema.entity.data.Database;
import org.openmetadata.schema.entity.data.DatabaseSchema;
import org.openmetadata.schema.entity.data.Table;
import org.openmetadata.schema.type.DatabaseSchemaProfilerConfig;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.Relationship;
import org.openmetadata.schema.type.csv.CsvDocumentation;
import org.openmetadata.schema.type.csv.CsvHeader;
import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.service.Entity;
import org.openmetadata.service.resources.databases.DatabaseSchemaResource;
import org.openmetadata.service.util.EntityUtil;
@ -89,7 +104,7 @@ public class DatabaseSchemaRepository extends EntityRepository<DatabaseSchema> {
private List<EntityReference> getTables(DatabaseSchema schema) {
return schema == null
? Collections.emptyList()
: findTo(schema.getId(), Entity.DATABASE_SCHEMA, Relationship.CONTAINS, Entity.TABLE);
: findTo(schema.getId(), Entity.DATABASE_SCHEMA, Relationship.CONTAINS, TABLE);
}
public void setFields(DatabaseSchema schema, Fields fields) {
@ -159,6 +174,23 @@ public class DatabaseSchemaRepository extends EntityRepository<DatabaseSchema> {
.withServiceType(database.getServiceType());
}
/** Exports all non-deleted tables of the named schema as CSV, one row per table, sorted by FQN. */
@Override
public String exportToCsv(String name, String user) throws IOException {
DatabaseSchema schema = getByName(null, name, Fields.EMPTY_FIELDS); // Validate database schema
TableRepository repository = (TableRepository) Entity.getEntityRepository(TABLE);
ListFilter filter = new ListFilter(Include.NON_DELETED).addQueryParam("databaseSchema", name);
List<Table> tables = repository.listAll(repository.getFields("owner,tags,domain"), filter);
// Deterministic row order so repeated exports diff cleanly
tables.sort(Comparator.comparing(EntityInterface::getFullyQualifiedName));
return new DatabaseSchemaCsv(schema, user).exportCsv(tables);
}
/** Imports/updates tables of the named schema from CSV; dryRun validates the CSV without persisting changes. */
@Override
public CsvImportResult importFromCsv(String name, String csv, boolean dryRun, String user)
throws IOException {
DatabaseSchema schema = getByName(null, name, Fields.EMPTY_FIELDS); // Validate database schema
return new DatabaseSchemaCsv(schema, user).importCsv(csv, dryRun);
}
public class DatabaseSchemaUpdater extends EntityUpdater {
public DatabaseSchemaUpdater(
DatabaseSchema original, DatabaseSchema updated, Operation operation) {
@ -215,4 +247,71 @@ public class DatabaseSchemaRepository extends EntityRepository<DatabaseSchema> {
setFieldsInternal(database, Fields.EMPTY_FIELDS);
return database;
}
/** CSV import/export support for a database schema: each CSV row represents one table. */
public static class DatabaseSchemaCsv extends EntityCsv<Table> {
public static final CsvDocumentation DOCUMENTATION = getCsvDocumentation(DATABASE_SCHEMA);
public static final List<CsvHeader> HEADERS = DOCUMENTATION.getHeaders();
// Parent schema whose tables are imported/exported
private final DatabaseSchema schema;
DatabaseSchemaCsv(DatabaseSchema schema, String user) {
super(TABLE, DOCUMENTATION.getHeaders(), user);
this.schema = schema;
}
/**
 * Converts a CSV record into the existing table it updates. Import never creates new
 * tables: if the named table does not exist under this schema, the record is marked
 * failed and null is returned (processRecord is cleared).
 * NOTE(review): unlike DatabaseCsv.toEntity, field 0 (name) is only used to look up the
 * table and is not re-applied via withName — presumably the stored name is kept; confirm
 * this asymmetry is intended.
 */
@Override
protected Table toEntity(CSVPrinter printer, CSVRecord csvRecord) throws IOException {
String tableFqn = FullyQualifiedName.add(schema.getFullyQualifiedName(), csvRecord.get(0));
Table table;
try {
table = Entity.getEntityByName(TABLE, tableFqn, "*", Include.NON_DELETED);
} catch (Exception ex) {
importFailure(printer, entityNotFound(0, TABLE, tableFqn), csvRecord);
processRecord = false;
return null;
}
// Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
// Fields 1,2,5,6 - displayName, description, retentionPeriod, sourceUrl
table
.withDisplayName(csvRecord.get(1))
.withDescription(csvRecord.get(2))
.withRetentionPeriod(csvRecord.get(5))
.withSourceUrl(csvRecord.get(6));
// Field 3 - owner ("entityType;entityName" format)
table.withOwner(getOwner(printer, csvRecord, 3));
// Field 4 - tags (fully qualified tag names separated by ';')
table.withTags(getTagLabels(printer, csvRecord, 4));
if (!processRecord) {
return null;
}
// Field 7 - domain
table.withDomain(getEntityReference(printer, csvRecord, 7, Entity.DOMAIN));
if (!processRecord) {
return null;
}
return table;
}
/** Serializes one table into a CSV row matching HEADERS order. */
@Override
protected List<String> toRecord(Table entity) {
// Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
List<String> recordList = new ArrayList<>();
addField(recordList, entity.getName());
addField(recordList, entity.getDisplayName());
addField(recordList, entity.getDescription());
addOwner(recordList, entity.getOwner());
addTagLabels(recordList, entity.getTags());
addField(recordList, entity.getRetentionPeriod());
addField(recordList, entity.getSourceUrl());
// Inherited domains are not exported — only a domain set directly on the table
String domain =
entity.getDomain() == null || Boolean.TRUE.equals(entity.getDomain().getInherited())
? ""
: entity.getDomain().getFullyQualifiedName();
addField(recordList, domain);
return recordList;
}
}
}

View File

@ -24,6 +24,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.parameters.RequestBody;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.io.IOException;
import java.util.List;
import java.util.UUID;
import javax.json.JsonPatch;
@ -56,6 +57,7 @@ import org.openmetadata.schema.type.DatabaseProfilerConfig;
import org.openmetadata.schema.type.EntityHistory;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.MetadataOperation;
import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.service.Entity;
import org.openmetadata.service.jdbi3.DatabaseRepository;
import org.openmetadata.service.jdbi3.ListFilter;
@ -390,6 +392,65 @@ public class DatabaseResource extends EntityResource<Database, DatabaseRepositor
return delete(uriInfo, securityContext, id, recursive, hardDelete);
}
// REST endpoint: GET /databases/name/{name}/export — returns database schemas as CSV text.
// NOTE(review): @Produces is TEXT_PLAIN but the ApiResponse declares mediaType
// "application/json" — confirm which is intended for the OpenAPI spec.
@GET
@Path("/name/{name}/export")
@Produces(MediaType.TEXT_PLAIN)
@Valid
@Operation(
operationId = "exportDatabase",
summary = "Export database in CSV format",
responses = {
@ApiResponse(
responseCode = "200",
description = "Exported csv with database schemas",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = String.class)))
})
public String exportCsv(
@Context SecurityContext securityContext,
@Parameter(description = "Name of the Database", schema = @Schema(type = "string"))
@PathParam("name")
String name)
throws IOException {
// Delegates to the shared EntityResource helper (authorization + repository export)
return exportCsvInternal(securityContext, name);
}
// REST endpoint: PUT /databases/name/{name}/import — updates existing schemas from a CSV body.
// dryRun defaults to true, i.e. the CSV is only validated unless dryRun=false is passed.
@PUT
@Path("/name/{name}/import")
@Consumes(MediaType.TEXT_PLAIN)
@Valid
@Operation(
operationId = "importDatabase",
summary =
"Import database schemas from CSV to update database schemas (no creation " + "allowed)",
responses = {
@ApiResponse(
responseCode = "200",
description = "Import result",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CsvImportResult.class)))
})
public CsvImportResult importCsv(
@Context SecurityContext securityContext,
@Parameter(description = "Name of the Database", schema = @Schema(type = "string"))
@PathParam("name")
String name,
@Parameter(
description =
"Dry-run when true is used for validating the CSV without really importing it. (default=true)",
schema = @Schema(type = "boolean"))
@DefaultValue("true")
@QueryParam("dryRun")
boolean dryRun,
String csv)
throws IOException {
// Delegates to the shared EntityResource helper (authorization + repository import)
return importCsvInternal(securityContext, name, csv, dryRun);
}
@PUT
@Path("/{id}/vote")
@Operation(

View File

@ -24,6 +24,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.parameters.RequestBody;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import java.io.IOException;
import java.util.List;
import java.util.UUID;
import javax.json.JsonPatch;
@ -51,6 +52,7 @@ import org.openmetadata.schema.api.data.CreateDatabaseSchema;
import org.openmetadata.schema.api.data.RestoreEntity;
import org.openmetadata.schema.entity.data.DatabaseSchema;
import org.openmetadata.schema.type.*;
import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.service.Entity;
import org.openmetadata.service.jdbi3.DatabaseSchemaRepository;
import org.openmetadata.service.jdbi3.ListFilter;
@ -360,6 +362,64 @@ public class DatabaseSchemaResource
return createOrUpdate(uriInfo, securityContext, schema);
}
// REST endpoint: GET /databaseSchemas/name/{name}/export — returns the schema's tables as CSV text.
// NOTE(review): @Produces is TEXT_PLAIN but the ApiResponse declares mediaType
// "application/json" — confirm which is intended for the OpenAPI spec.
@GET
@Path("/name/{name}/export")
@Produces(MediaType.TEXT_PLAIN)
@Valid
@Operation(
operationId = "exportDatabaseSchema",
summary = "Export database schema in CSV format",
responses = {
@ApiResponse(
responseCode = "200",
description = "Exported csv with tables from the database schema",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = String.class)))
})
public String exportCsv(
@Context SecurityContext securityContext,
@Parameter(description = "Name of the Database schema", schema = @Schema(type = "string"))
@PathParam("name")
String name)
throws IOException {
// Delegates to the shared EntityResource helper (authorization + repository export)
return exportCsvInternal(securityContext, name);
}
// REST endpoint: PUT /databaseSchemas/name/{name}/import — updates existing tables from a CSV body.
// dryRun defaults to true, i.e. the CSV is only validated unless dryRun=false is passed.
@PUT
@Path("/name/{name}/import")
@Consumes(MediaType.TEXT_PLAIN)
@Valid
@Operation(
operationId = "importDatabaseSchema",
summary = "Import tables from CSV to update database schema (no creation allowed)",
responses = {
@ApiResponse(
responseCode = "200",
description = "Import result",
content =
@Content(
mediaType = "application/json",
schema = @Schema(implementation = CsvImportResult.class)))
})
public CsvImportResult importCsv(
@Context SecurityContext securityContext,
@Parameter(description = "Name of the Database schema", schema = @Schema(type = "string"))
@PathParam("name")
String name,
@Parameter(
description =
"Dry-run when true is used for validating the CSV without really importing it. (default=true)",
schema = @Schema(type = "boolean"))
@DefaultValue("true")
@QueryParam("dryRun")
boolean dryRun,
String csv)
throws IOException {
// Delegates to the shared EntityResource helper (authorization + repository import)
return importCsvInternal(securityContext, name, csv, dryRun);
}
@PUT
@Path("/{id}/vote")
@Operation(

View File

@ -0,0 +1,71 @@
{
"summary": "Database CSV file is used for importing and exporting databaseSchema metadata from and to an **existing** database.",
"headers": [
{
"name": "name",
"required": true,
"description": "The name of the database schema being updated.",
"examples": [
"`users`, `customers`"
]
},
{
"name": "displayName",
"required": false,
"description": "Display name for the databaseSchema.",
"examples": [
"`User Schema`, `Customer Schema`"
]
},
{
"name": "description",
"required": false,
"description": "Description for the database schema in Markdown format.",
"examples": [
"`Customer Schema` that contains all the tables related to customer entity."
]
},
{
"name": "owner",
"required": false,
"description": "Owner names separated by ';'. For team owner, include prefix team. For user owner, include prefix user.",
"examples": [
"`team;marketing`",
"`user;john`"
]
},
{
"name": "tags",
"required": false,
"description": "Fully qualified classification tag names associated with the database schema, separated by ';'.",
"examples": [
"`PII.Sensitive`",
"`PII.Sensitive;PersonalData.Personal`"
]
},
{
"name": "retentionPeriod",
"required": false,
"description": "Retention period of the data in the database schema. Period is expressed as duration in ISO 8601 format in UTC. Example - `P23DT23H`. When not set, the retention period is inherited from the parent database, if it exists.",
"examples": [
"`P23DT23H`"
]
},
{
"name": "sourceUrl",
"required": false,
"description": "Source URL for the database schema",
"examples": [
"http://domain.com/system/customer_schema"
]
},
{
"name": "domain",
"required": false,
"description": "Domain to which the database schema belongs to",
"examples": [
"Marketing", "Sales"
]
}
]
}

View File

@ -0,0 +1,71 @@
{
"summary": "Database schema CSV file is used for importing and exporting table metadata from and to an **existing** database schema.",
"headers": [
{
"name": "name",
"required": true,
"description": "The name of the table being updated.",
"examples": [
"`users`, `customers`"
]
},
{
"name": "displayName",
"required": false,
"description": "Display name for the table.",
"examples": [
"`User Schema`, `Customer Schema`"
]
},
{
"name": "description",
"required": false,
"description": "Description for the table in Markdown format.",
"examples": [
"`Customer Schema` that contains all the tables related to customer entity."
]
},
{
"name": "owner",
"required": false,
"description": "Owner names separated by ';'. For team owner, include prefix team. For user owner, include prefix user.",
"examples": [
"`team;marketing`",
"`user;john`"
]
},
{
"name": "tags",
"required": false,
"description": "Fully qualified classification tag names associated with the table, separated by ';'.",
"examples": [
"`PII.Sensitive`",
"`PII.Sensitive;PersonalData.Personal`"
]
},
{
"name": "retentionPeriod",
"required": false,
"description": "Retention period of the data in the table. Period is expressed as duration in ISO 8601 format in UTC. Example - `P23DT23H`. When not set, the retention period is inherited from the parent database schema, if it exists.",
"examples": [
"`P23DT23H`"
]
},
{
"name": "sourceUrl",
"required": false,
"description": "Source URL for the table",
"examples": [
"http://domain.com/system/customer_schema"
]
},
{
"name": "domain",
"required": false,
"description": "Domain to which the table belongs",
"examples": [
"Marketing", "Sales"
]
}
]
}

View File

@ -13,6 +13,7 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang.StringEscapeUtils;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
@ -95,7 +96,9 @@ public class EntityCsvTest {
}
public static String getFailedRecord(String record, String errorDetails) {
return String.format("%s,\"%s\",%s", EntityCsv.IMPORT_STATUS_FAILED, errorDetails, record);
errorDetails = StringEscapeUtils.escapeCsv(errorDetails);
String format = errorDetails.startsWith("\"") ? "%s,%s,%s" : "%s,\"%s\",%s";
return String.format(format, EntityCsv.IMPORT_STATUS_FAILED, errorDetails, record);
}
private static List<CsvHeader> getHeaders(Object[][] headers) {

View File

@ -14,9 +14,17 @@
package org.openmetadata.service.resources.databases;
import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
import static org.apache.commons.lang.StringEscapeUtils.escapeCsv;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.openmetadata.common.utils.CommonUtil.listOf;
import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty;
import static org.openmetadata.csv.CsvUtil.recordToString;
import static org.openmetadata.csv.EntityCsv.entityNotFound;
import static org.openmetadata.csv.EntityCsvTest.assertRows;
import static org.openmetadata.csv.EntityCsvTest.assertSummary;
import static org.openmetadata.csv.EntityCsvTest.createCsv;
import static org.openmetadata.csv.EntityCsvTest.getFailedRecord;
import static org.openmetadata.service.util.EntityUtil.getFqn;
import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS;
import static org.openmetadata.service.util.TestUtils.assertListNotEmpty;
@ -27,15 +35,21 @@ import static org.openmetadata.service.util.TestUtils.assertResponseContains;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.client.HttpResponseException;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.openmetadata.csv.EntityCsv;
import org.openmetadata.schema.api.data.CreateDatabase;
import org.openmetadata.schema.api.data.CreateDatabaseSchema;
import org.openmetadata.schema.entity.data.Database;
import org.openmetadata.schema.type.ApiStatus;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.service.Entity;
import org.openmetadata.service.jdbi3.DatabaseRepository.DatabaseCsv;
import org.openmetadata.service.jdbi3.DatabaseSchemaRepository.DatabaseSchemaCsv;
import org.openmetadata.service.resources.EntityResourceTest;
import org.openmetadata.service.resources.databases.DatabaseResource.DatabaseList;
import org.openmetadata.service.util.FullyQualifiedName;
@ -90,6 +104,64 @@ public class DatabaseResourceTest extends EntityResourceTest<Database, CreateDat
}
}
@Test
@SneakyThrows
void testImportInvalidCsv() {
  // Negative import tests for a database: a row with an unknown tag must fail, and a row
  // naming a non-existing schema must fail (import updates only; no creation).
  Database database = createEntity(createRequest("invalidCsv"), ADMIN_AUTH_HEADERS);
  String databaseName = database.getFullyQualifiedName();
  DatabaseSchemaResourceTest schemaTest = new DatabaseSchemaResourceTest();
  CreateDatabaseSchema createSchema =
      schemaTest.createRequest("s1").withDatabase(database.getFullyQualifiedName());
  schemaTest.createEntity(createSchema, ADMIN_AUTH_HEADERS);

  // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
  // Update databaseSchema with an invalid tags field
  String resultsHeader = recordToString(EntityCsv.getResultHeaders(DatabaseCsv.HEADERS));
  String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,";
  String csv = createCsv(DatabaseCsv.HEADERS, listOf(record), null);
  CsvImportResult result = importCsv(databaseName, csv, false);
  assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
  String[] expectedRows =
      new String[] {
        resultsHeader, getFailedRecord(record, entityNotFound(4, "tag", "Tag.invalidTag"))
      };
  assertRows(result, expectedRows);

  // Existing schema can be updated. New schema can't be created.
  record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,";
  // Fix: this is a database-level import, so use DatabaseCsv.HEADERS here; the original
  // used DatabaseSchemaCsv.HEADERS, which only worked because the two header lists coincide.
  csv = createCsv(DatabaseCsv.HEADERS, listOf(record), null);
  result = importCsv(databaseName, csv, false);
  assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
  String schemaFqn = FullyQualifiedName.add(database.getFullyQualifiedName(), "non-existing");
  expectedRows =
      new String[] {
        resultsHeader,
        getFailedRecord(record, entityNotFound(0, Entity.DATABASE_SCHEMA, schemaFqn))
      };
  assertRows(result, expectedRows);
}
// Round-trip test: export the database's schemas, apply an updated row, re-import, and
// validate via the shared importCsvAndValidate helper.
@Test
void testImportExport() throws IOException {
String user1 = USER1.getName();
Database database = createEntity(createRequest("importExportTest"), ADMIN_AUTH_HEADERS);
DatabaseSchemaResourceTest schemaTest = new DatabaseSchemaResourceTest();
CreateDatabaseSchema createSchema =
schemaTest.createRequest("s1").withDatabase(database.getFullyQualifiedName());
schemaTest.createEntity(createSchema, ADMIN_AUTH_HEADERS);
// Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
// Update the schema row with new description, owner, tier, retention, URL and domain;
// domain FQN is CSV-escaped since it may contain commas/quotes
String record =
String.format(
"s1,dsp1,new-dsc1,user;%s,Tier.Tier1,P23DT23H,http://test.com,%s",
user1, escapeCsv(DOMAIN.getFullyQualifiedName()));
// Update created entity with changes
importCsvAndValidate(
database.getFullyQualifiedName(), DatabaseCsv.HEADERS, null, listOf(record));
}
@Override
public Database validateGetWithDifferentFields(Database database, boolean byName)
throws HttpResponseException {

View File

@ -14,28 +14,42 @@
package org.openmetadata.service.resources.databases;
import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
import static org.apache.commons.lang.StringEscapeUtils.escapeCsv;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.openmetadata.common.utils.CommonUtil.listOf;
import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty;
import static org.openmetadata.csv.CsvUtil.recordToString;
import static org.openmetadata.csv.EntityCsv.entityNotFound;
import static org.openmetadata.csv.EntityCsvTest.assertRows;
import static org.openmetadata.csv.EntityCsvTest.assertSummary;
import static org.openmetadata.csv.EntityCsvTest.createCsv;
import static org.openmetadata.csv.EntityCsvTest.getFailedRecord;
import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS;
import static org.openmetadata.service.util.TestUtils.assertListNotNull;
import static org.openmetadata.service.util.TestUtils.assertListNull;
import static org.openmetadata.service.util.TestUtils.assertResponseContains;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.Response;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.client.HttpResponseException;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.openmetadata.csv.EntityCsv;
import org.openmetadata.schema.api.data.CreateDatabaseSchema;
import org.openmetadata.schema.api.data.CreateTable;
import org.openmetadata.schema.api.data.RestoreEntity;
import org.openmetadata.schema.entity.data.DatabaseSchema;
import org.openmetadata.schema.entity.data.Table;
import org.openmetadata.schema.type.ApiStatus;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.service.Entity;
import org.openmetadata.service.jdbi3.DatabaseSchemaRepository.DatabaseSchemaCsv;
import org.openmetadata.service.resources.EntityResourceTest;
import org.openmetadata.service.resources.databases.DatabaseSchemaResource.DatabaseSchemaList;
import org.openmetadata.service.util.FullyQualifiedName;
@ -70,7 +84,7 @@ class DatabaseSchemaResourceTest extends EntityResourceTest<DatabaseSchema, Crea
tableResourceTest
.createRequest("t1", "", "", null)
.withDatabaseSchema(createdSchema.getFullyQualifiedName());
Table table1 = tableResourceTest.createEntity(createTable, ADMIN_AUTH_HEADERS);
tableResourceTest.createEntity(createTable, ADMIN_AUTH_HEADERS);
createTable =
tableResourceTest
.createRequest("t2", "", "", null)
@ -91,6 +105,65 @@ class DatabaseSchemaResourceTest extends EntityResourceTest<DatabaseSchema, Crea
assertNotNull(schema);
}
@Test
@SneakyThrows
void testImportInvalidCsv() {
  // Negative import tests for a schema: a row with an unknown tag must fail, and a row
  // naming a non-existing table must fail (import updates only; no creation).
  DatabaseSchema schema = createEntity(createRequest("invalidCsv"), ADMIN_AUTH_HEADERS);
  String schemaName = schema.getFullyQualifiedName();
  TableResourceTest tableTest = new TableResourceTest();
  CreateTable createTable =
      tableTest.createRequest("s1").withDatabaseSchema(schema.getFullyQualifiedName());
  tableTest.createEntity(createTable, ADMIN_AUTH_HEADERS);

  // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
  // Update table with an invalid tags field
  String resultsHeader = recordToString(EntityCsv.getResultHeaders(DatabaseSchemaCsv.HEADERS));
  String record = "s1,dsp1,dsc1,,Tag.invalidTag,,,";
  String csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null);
  CsvImportResult result = importCsv(schemaName, csv, false);
  assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
  String[] expectedRows =
      new String[] {
        resultsHeader, getFailedRecord(record, entityNotFound(4, "tag", "Tag.invalidTag"))
      };
  assertRows(result, expectedRows);

  // Existing table can be updated. New table can't be created.
  record = "non-existing,dsp1,dsc1,,Tag.invalidTag,,,";
  csv = createCsv(DatabaseSchemaCsv.HEADERS, listOf(record), null);
  result = importCsv(schemaName, csv, false);
  assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
  String tableFqn = FullyQualifiedName.add(schema.getFullyQualifiedName(), "non-existing");
  // Fix: String.replace returns a new string; the original discarded the result, making the
  // intended double-quote CSV escaping a no-op. Assign the result back.
  tableFqn = tableFqn.replace("\"", "\"\"");
  expectedRows =
      new String[] {
        resultsHeader, getFailedRecord(record, entityNotFound(0, Entity.TABLE, tableFqn))
      };
  assertRows(result, expectedRows);
}
@Test
void testImportExport() throws IOException {
  // Round-trip check: import a CSV row that updates table "s1" under a freshly
  // created schema (new description, owner, tier, retention, sourceUrl, domain)
  // and validate the applied changes via importCsvAndValidate.
  String ownerName = USER1.getName();
  DatabaseSchema schema = createEntity(createRequest("importExportTest"), ADMIN_AUTH_HEADERS);
  TableResourceTest tableResourceTest = new TableResourceTest();
  CreateTable tableRequest =
      tableResourceTest.createRequest("s1").withDatabaseSchema(schema.getFullyQualifiedName());
  tableResourceTest.createEntity(tableRequest, ADMIN_AUTH_HEADERS);

  // Headers: name, displayName, description, owner, tags, retentionPeriod, sourceUrl, domain
  // Update terms with change in description
  String row =
      String.format(
          "s1,dsp1,new-dsc1,user;%s,Tier.Tier1,P23DT23H,http://test.com,%s",
          ownerName, escapeCsv(DOMAIN.getFullyQualifiedName()));
  // Update created entity with changes
  importCsvAndValidate(
      schema.getFullyQualifiedName(), DatabaseSchemaCsv.HEADERS, null, listOf(row));
}
@Override
public DatabaseSchema validateGetWithDifferentFields(DatabaseSchema schema, boolean byName)
throws HttpResponseException {

View File

@ -371,7 +371,7 @@ public class GlossaryResourceTest extends EntityResourceTest<Glossary, CreateGlo
Awaitility.await().atMost(4, TimeUnit.SECONDS).until(() -> true);
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
String[] expectedRows = {
resultsHeader, getFailedRecord(record, "[name must match \"\"(?U)^[\\w'\\- .&()%]+$\"\"]")
resultsHeader, getFailedRecord(record, "[name must match \"(?U)^[\\w'\\- .&()%]+$\"]")
};
assertRows(result, expectedRows);
@ -382,7 +382,10 @@ public class GlossaryResourceTest extends EntityResourceTest<Glossary, CreateGlo
Awaitility.await().atMost(4, TimeUnit.SECONDS).until(() -> true);
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
expectedRows =
new String[] {resultsHeader, getFailedRecord(record, entityNotFound(0, "invalidParent"))};
new String[] {
resultsHeader,
getFailedRecord(record, entityNotFound(0, Entity.GLOSSARY_TERM, "invalidParent"))
};
assertRows(result, expectedRows);
// Create glossaryTerm with invalid tags field
@ -391,7 +394,9 @@ public class GlossaryResourceTest extends EntityResourceTest<Glossary, CreateGlo
result = importCsv(glossaryName, csv, false);
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
expectedRows =
new String[] {resultsHeader, getFailedRecord(record, entityNotFound(7, "Tag.invalidTag"))};
new String[] {
resultsHeader, getFailedRecord(record, entityNotFound(7, Entity.TAG, "Tag.invalidTag"))
};
assertRows(result, expectedRows);
}

View File

@ -801,7 +801,8 @@ public class TeamResourceTest extends EntityResourceTest<Team, CreateTeam> {
CsvImportResult result = importCsv(team.getName(), csv, false);
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
String[] expectedRows = {
resultsHeader, getFailedRecord(record, EntityCsv.entityNotFound(8, "invalidPolicy"))
resultsHeader,
getFailedRecord(record, EntityCsv.entityNotFound(8, Entity.POLICY, "invalidPolicy"))
};
assertRows(result, expectedRows);
@ -812,7 +813,8 @@ public class TeamResourceTest extends EntityResourceTest<Team, CreateTeam> {
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
expectedRows =
new String[] {
resultsHeader, getFailedRecord(record, EntityCsv.entityNotFound(7, "invalidRole"))
resultsHeader,
getFailedRecord(record, EntityCsv.entityNotFound(7, Entity.ROLE, "invalidRole"))
};
assertRows(result, expectedRows);
@ -823,7 +825,8 @@ public class TeamResourceTest extends EntityResourceTest<Team, CreateTeam> {
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
expectedRows =
new String[] {
resultsHeader, getFailedRecord(record, EntityCsv.entityNotFound(5, "invalidOwner"))
resultsHeader,
getFailedRecord(record, EntityCsv.entityNotFound(5, Entity.USER, "invalidOwner"))
};
assertRows(result, expectedRows);
@ -834,7 +837,7 @@ public class TeamResourceTest extends EntityResourceTest<Team, CreateTeam> {
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
expectedRows =
new String[] {
resultsHeader, getFailedRecord(record, EntityCsv.entityNotFound(4, "invalidParent"))
resultsHeader, getFailedRecord(record, EntityCsv.entityNotFound(4, TEAM, "invalidParent"))
};
assertRows(result, expectedRows);

View File

@ -1096,7 +1096,7 @@ public class UserResourceTest extends EntityResourceTest<User, CreateUser> {
CsvImportResult result = importCsv(team.getName(), csv, false);
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
String[] expectedRows = {
resultsHeader, getFailedRecord(record, "[name must match \"\"(?U)^[\\w\\-.]+$\"\"]")
resultsHeader, getFailedRecord(record, "[name must match \"(?U)^[\\w\\-.]+$\"]")
};
assertRows(result, expectedRows);
@ -1108,7 +1108,8 @@ public class UserResourceTest extends EntityResourceTest<User, CreateUser> {
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
expectedRows =
new String[] {
resultsHeader, getFailedRecord(record, EntityCsv.entityNotFound(6, "invalidTeam"))
resultsHeader,
getFailedRecord(record, EntityCsv.entityNotFound(6, Entity.TEAM, "invalidTeam"))
};
assertRows(result, expectedRows);
@ -1119,7 +1120,8 @@ public class UserResourceTest extends EntityResourceTest<User, CreateUser> {
assertSummary(result, ApiStatus.FAILURE, 2, 1, 1);
expectedRows =
new String[] {
resultsHeader, getFailedRecord(record, EntityCsv.entityNotFound(7, "invalidRole"))
resultsHeader,
getFailedRecord(record, EntityCsv.entityNotFound(7, Entity.ROLE, "invalidRole"))
};
assertRows(result, expectedRows);
}