Undo entity status migration, show entityStatus by default (#23511)

* Revert Entity Status Migrations as we already have the default value

* Batch Migrations and by default show entityStatus field in the response

* Setting default value of enum in the status.json and fixing test cases for new default enum UNPROCESSED

* Update generated TypeScript types

* DataContract by default has Unprocessed status; overridden method in the GlossaryTermRepository

---------

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
This commit is contained in:
Ram Narayan Balaji 2025-09-24 18:38:03 +05:30 committed by GitHub
parent c6c9eb26e2
commit 532f3e5e4e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
43 changed files with 266 additions and 221 deletions

View File

@ -159,12 +159,6 @@ public class DataContractRepository extends EntityRepository<DataContract> {
createOrUpdateDataContractTestSuite(dataContract, update);
}
@Override
protected void setDefaultStatus(DataContract entity, boolean update) {
// Intentionally a no-op: a DataContract keeps exactly the entityStatus the caller
// provided — a null status stays null and a non-null status is left untouched.
// This overrides the base EntityRepository behavior, which would otherwise assign
// a default status (EntityStatus.UNPROCESSED) when the field is null.
}
// Ensure we have a pipeline after creation if needed
@Override
protected void postCreate(DataContract dataContract) {

View File

@ -585,8 +585,8 @@ public abstract class EntityRepository<T extends EntityInterface> {
if (entity.getEntityStatus() != null) {
return;
}
// Set default status to APPROVED
entity.setEntityStatus(EntityStatus.APPROVED);
// Set default status to UNPROCESSED
entity.setEntityStatus(EntityStatus.UNPROCESSED);
}
/**
@ -1186,11 +1186,6 @@ public abstract class EntityRepository<T extends EntityInterface> {
entity.setReviewers(
fields.contains(FIELD_REVIEWERS) ? getReviewers(entity) : entity.getReviewers());
entity.setVotes(fields.contains(FIELD_VOTES) ? getVotes(entity) : entity.getVotes());
if (fields.contains(FIELD_ENTITY_STATUS)) {
if (entity.getEntityStatus() == null) {
entity.setEntityStatus(EntityStatus.APPROVED);
}
}
setFields(entity, fields);
return entity;

View File

@ -233,17 +233,42 @@ public class GlossaryTermRepository extends EntityRepository<GlossaryTerm> {
// Validate related terms
EntityUtil.populateEntityReferences(entity.getRelatedTerms());
if (!update || entity.getEntityStatus() == null) {
// If parentTerm or glossary has reviewers set, the glossary term can only be created in
// `Draft` mode
entity.setEntityStatus(
!nullOrEmpty(parentReviewers) ? EntityStatus.DRAFT : EntityStatus.APPROVED);
}
if (!update) {
checkDuplicateTerms(entity);
}
}
@Override
protected void setDefaultStatus(GlossaryTerm entity, boolean update) {
// A status of UNPROCESSED is the POJO default, meaning the caller did not set one
// explicitly; null and create (!update) likewise mean "no explicit status".
boolean statusExplicitlySet =
    update
        && entity.getEntityStatus() != null
        && entity.getEntityStatus() != EntityStatus.UNPROCESSED;
if (statusExplicitlySet) {
  return;
}
// Resolve reviewers from the parent term first, then fall back to the glossary,
// to decide the appropriate default status for this term.
List<EntityReference> inheritedReviewers = null;
if (entity.getParent() != null) {
  GlossaryTerm parentTerm =
      Entity.getEntity(
          entity.getParent().withType(GLOSSARY_TERM), "reviewers", Include.NON_DELETED);
  inheritedReviewers = parentTerm.getReviewers();
}
// NOTE(review): the glossary fallback fires only when the parent lookup yielded null
// (not an empty list) — confirm this matches the intended inheritance semantics.
if (inheritedReviewers == null && entity.getGlossary() != null) {
  Glossary glossary =
      Entity.getEntity(entity.getGlossary(), "reviewers", Include.NON_DELETED);
  inheritedReviewers = glossary.getReviewers();
}
// Terms governed by reviewers can only start in `Draft`; otherwise default to `Approved`.
entity.setEntityStatus(
    nullOrEmpty(inheritedReviewers) ? EntityStatus.APPROVED : EntityStatus.DRAFT);
}
@Override
public void storeEntity(GlossaryTerm entity, boolean update) {
// Relationships and fields such as parentTerm are derived and not stored as part of json

View File

@ -1,6 +1,7 @@
package org.openmetadata.service.migration.mysql.v1100;
import lombok.SneakyThrows;
import org.openmetadata.service.jdbi3.locator.ConnectionType;
import org.openmetadata.service.migration.api.MigrationProcessImpl;
import org.openmetadata.service.migration.utils.MigrationFile;
import org.openmetadata.service.migration.utils.v1100.MigrationUtil;
@ -14,7 +15,7 @@ public class Migration extends MigrationProcessImpl {
@Override
@SneakyThrows
public void runDataMigration() {
MigrationUtil migrationUtil = new MigrationUtil(collectionDAO);
migrationUtil.migrateEntityStatusForExistingEntities(handle);
MigrationUtil migrationUtil = new MigrationUtil(handle, ConnectionType.MYSQL);
migrationUtil.migrateEntityStatusForExistingEntities();
}
}

View File

@ -1,6 +1,7 @@
package org.openmetadata.service.migration.postgres.v1100;
import lombok.SneakyThrows;
import org.openmetadata.service.jdbi3.locator.ConnectionType;
import org.openmetadata.service.migration.api.MigrationProcessImpl;
import org.openmetadata.service.migration.utils.MigrationFile;
import org.openmetadata.service.migration.utils.v1100.MigrationUtil;
@ -14,7 +15,7 @@ public class Migration extends MigrationProcessImpl {
@Override
@SneakyThrows
public void runDataMigration() {
MigrationUtil migrationUtil = new MigrationUtil(collectionDAO);
migrationUtil.migrateEntityStatusForExistingEntities(handle);
MigrationUtil migrationUtil = new MigrationUtil(handle, ConnectionType.POSTGRES);
migrationUtil.migrateEntityStatusForExistingEntities();
}
}

View File

@ -1,121 +1,121 @@
package org.openmetadata.service.migration.utils.v1100;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import lombok.extern.slf4j.Slf4j;
import org.jdbi.v3.core.Handle;
import org.openmetadata.service.jdbi3.CollectionDAO;
import org.openmetadata.service.jdbi3.locator.ConnectionType;
@Slf4j
public class MigrationUtil {
private static final int BATCH_SIZE = 500;
private final CollectionDAO collectionDAO;
private boolean isPostgres = false;
private final Handle handle;
private final ConnectionType connectionType;
public MigrationUtil(CollectionDAO collectionDAO) {
this.collectionDAO = collectionDAO;
public MigrationUtil(Handle handle, ConnectionType connectionType) {
this.handle = handle;
this.connectionType = connectionType;
}
public void migrateEntityStatusForExistingEntities(Handle handle) {
try {
Connection connection = handle.getConnection();
DatabaseMetaData metaData = connection.getMetaData();
String dbType = metaData.getDatabaseProductName().toLowerCase();
isPostgres = dbType.contains("postgres") || dbType.contains("postgresql");
LOG.info(
"Starting entityStatus migration for v1.10.0 on {} database",
isPostgres ? "PostgreSQL" : "MySQL");
} catch (SQLException e) {
LOG.error("Failed to determine database type, assuming MySQL: {}", e.getMessage());
isPostgres = false;
}
// All entity tables that need entityStatus field
String[] entityTables = {
"table_entity",
"dashboard_entity",
"pipeline_entity",
"topic_entity",
"ml_model_entity",
"storage_container_entity",
"search_index_entity",
"stored_procedure_entity",
"dashboard_data_model_entity",
"database_entity",
"database_schema_entity",
"metric_entity",
"chart_entity",
"report_entity",
"data_product_entity",
"tag",
"classification",
"glossary_term_entity",
"data_contract_entity",
"test_case"
};
public void migrateEntityStatusForExistingEntities() {
int totalEntitiesMigrated = 0;
for (String tableName : entityTables) {
int migrated = 0;
if (tableName.equals("glossary_term_entity")) {
migrated = migrateGlossaryTermStatus(handle);
} else if (tableName.equals("data_contract_entity")) {
migrated = migrateDataContractStatus(handle);
} else {
migrated = migrateEntityStatusForTable(handle, tableName);
}
totalEntitiesMigrated += migrated;
}
// Only migrate glossary terms and data contracts that have existing status fields
totalEntitiesMigrated += migrateGlossaryTermStatus();
totalEntitiesMigrated += migrateDataContractStatus();
LOG.info("===== MIGRATION SUMMARY =====");
LOG.info("Total entities migrated with entityStatus field: {}", totalEntitiesMigrated);
LOG.info("Total entities migrated with status field changes: {}", totalEntitiesMigrated);
LOG.info("===== MIGRATION COMPLETE =====");
}
private int migrateEntityStatusForTable(Handle handle, String tableName) {
LOG.info("Processing table: {}", tableName);
private int migrateGlossaryTermStatus() {
LOG.info("Processing glossary_term_entity: migrating 'status' to 'entityStatus'");
int totalMigrated = 0;
int batchNumber = 0;
try {
// First, get the total count of entities that need migration
String countSql = buildCountQuery(tableName);
String countSql = buildGlossaryTermCountQuery();
int totalToMigrate = handle.createQuery(countSql).mapTo(Integer.class).one();
if (totalToMigrate == 0) {
LOG.info(
"✓ Completed {}: No records needed migration (already have entityStatus)", tableName);
LOG.info("✓ Completed glossary_term_entity: No records needed migration");
return 0;
}
LOG.info(" Found {} records to migrate in {}", totalToMigrate, tableName);
LOG.info(" Found {} glossary terms to migrate", totalToMigrate);
if (isPostgres) {
// PostgreSQL: Use CTE with LIMIT for batch processing
totalMigrated = migratePostgresBatch(handle, tableName, totalToMigrate);
if (connectionType == ConnectionType.POSTGRES) {
totalMigrated = migrateGlossaryTermPostgresBatch(totalToMigrate);
} else {
// MySQL: Need to use ORDER BY with LIMIT for deterministic batches
totalMigrated = migrateMySQLBatch(handle, tableName, totalToMigrate);
totalMigrated = migrateGlossaryTermMySQLBatch(totalToMigrate);
}
if (totalMigrated > 0) {
LOG.info("✓ Completed {}: {} total records migrated", tableName, totalMigrated);
LOG.info("✓ Completed glossary_term_entity: {} total records migrated", totalMigrated);
}
} catch (Exception e) {
LOG.error("✗ FAILED migrating entityStatus for table {}: {}", tableName, e.getMessage(), e);
LOG.error("✗ FAILED migrating glossary_term_entity status: {}", e.getMessage(), e);
}
return totalMigrated;
}
private int migratePostgresBatch(Handle handle, String tableName, int totalToMigrate)
throws InterruptedException {
private int migrateDataContractStatus() {
LOG.info(
"Processing data_contract_entity: migrating 'status' to 'entityStatus' and 'Active' to 'Approved'");
int totalMigrated = 0;
try {
// First, get the total count of entities that need migration
String countSql = buildDataContractCountQuery();
int totalToMigrate = handle.createQuery(countSql).mapTo(Integer.class).one();
if (totalToMigrate == 0) {
LOG.info("✓ Completed data_contract_entity: No records needed migration");
return 0;
}
LOG.info(" Found {} data contracts to migrate", totalToMigrate);
if (connectionType == ConnectionType.POSTGRES) {
totalMigrated = migrateDataContractPostgresBatch(totalToMigrate);
} else {
totalMigrated = migrateDataContractMySQLBatch(totalToMigrate);
}
if (totalMigrated > 0) {
LOG.info("✓ Completed data_contract_entity: {} total records migrated", totalMigrated);
}
} catch (Exception e) {
LOG.error("✗ FAILED migrating data_contract_entity status: {}", e.getMessage(), e);
}
return totalMigrated;
}
private String buildGlossaryTermCountQuery() {
if (connectionType == ConnectionType.POSTGRES) {
return "SELECT COUNT(*) FROM glossary_term_entity "
+ "WHERE json ?? 'status' AND NOT json ?? 'entityStatus'";
} else {
return "SELECT COUNT(*) FROM glossary_term_entity "
+ "WHERE JSON_CONTAINS_PATH(json, 'one', '$.status') = 1 "
+ "AND JSON_CONTAINS_PATH(json, 'one', '$.entityStatus') = 0";
}
}
private String buildDataContractCountQuery() {
if (connectionType == ConnectionType.POSTGRES) {
return "SELECT COUNT(*) FROM data_contract_entity "
+ "WHERE json ?? 'status' AND NOT json ?? 'entityStatus'";
} else {
return "SELECT COUNT(*) FROM data_contract_entity "
+ "WHERE JSON_CONTAINS_PATH(json, 'one', '$.status') = 1 "
+ "AND JSON_CONTAINS_PATH(json, 'one', '$.entityStatus') = 0";
}
}
private int migrateGlossaryTermPostgresBatch(int totalToMigrate) throws InterruptedException {
int totalMigrated = 0;
int batchNumber = 0;
@ -126,17 +126,18 @@ public class MigrationUtil {
String.format(
"WITH batch AS ( "
+ " SELECT id "
+ " FROM %1$s "
+ " WHERE NOT ((json)::jsonb ?? 'entityStatus') "
+ " FROM glossary_term_entity "
+ " WHERE json ?? 'status' AND NOT json ?? 'entityStatus' "
+ " ORDER BY id "
+ " LIMIT %2$d "
+ " LIMIT %d "
+ ") "
+ "UPDATE %1$s t "
+ "SET json = jsonb_set((t.json)::jsonb, '{entityStatus}', '\"Approved\"'::jsonb)::json "
+ "UPDATE glossary_term_entity t "
+ "SET json = jsonb_set(t.json - 'status', '{entityStatus}', "
+ "COALESCE(t.json->'status', '\"Approved\"'::jsonb)) "
+ "FROM batch "
+ "WHERE t.id = batch.id "
+ " AND NOT ((t.json)::jsonb ?? 'entityStatus')",
tableName, BATCH_SIZE);
+ " AND t.json ?? 'status' AND NOT t.json ?? 'entityStatus'",
BATCH_SIZE);
long startTime = System.currentTimeMillis();
int batchCount = handle.createUpdate(updateSql).execute();
@ -145,11 +146,10 @@ public class MigrationUtil {
if (batchCount > 0) {
totalMigrated += batchCount;
LOG.info(
" Batch {}: Migrated {} records in {}ms (Total for {}: {}/{})",
" Batch {}: Migrated {} glossary terms in {}ms (Total: {}/{})",
batchNumber,
batchCount,
executionTime,
tableName,
totalMigrated,
totalToMigrate);
Thread.sleep(100);
@ -161,8 +161,7 @@ public class MigrationUtil {
return totalMigrated;
}
private int migrateMySQLBatch(Handle handle, String tableName, int totalToMigrate)
throws InterruptedException {
private int migrateGlossaryTermMySQLBatch(int totalToMigrate) throws InterruptedException {
int totalMigrated = 0;
int batchNumber = 0;
@ -171,17 +170,20 @@ public class MigrationUtil {
String updateSql =
String.format(
"UPDATE %1$s t "
"UPDATE glossary_term_entity t "
+ "JOIN ( "
+ " SELECT id "
+ " FROM %1$s "
+ " WHERE JSON_EXTRACT(json, '$.entityStatus') IS NULL "
+ " FROM glossary_term_entity "
+ " WHERE JSON_CONTAINS_PATH(json, 'one', '$.status') = 1 "
+ " AND JSON_CONTAINS_PATH(json, 'one', '$.entityStatus') = 0 "
+ " ORDER BY id "
+ " LIMIT %2$d "
+ " LIMIT %d "
+ ") s ON t.id = s.id "
+ "SET t.json = JSON_SET(t.json, '$.entityStatus', 'Approved') "
+ "WHERE JSON_EXTRACT(t.json, '$.entityStatus') IS NULL",
tableName, BATCH_SIZE);
+ "SET t.json = JSON_SET(JSON_REMOVE(t.json, '$.status'), '$.entityStatus', "
+ "COALESCE(JSON_UNQUOTE(JSON_EXTRACT(t.json, '$.status')), 'Approved')) "
+ "WHERE JSON_CONTAINS_PATH(t.json, 'one', '$.status') = 1 "
+ " AND JSON_CONTAINS_PATH(t.json, 'one', '$.entityStatus') = 0",
BATCH_SIZE);
long startTime = System.currentTimeMillis();
int batchCount = handle.createUpdate(updateSql).execute();
@ -190,11 +192,10 @@ public class MigrationUtil {
if (batchCount > 0) {
totalMigrated += batchCount;
LOG.info(
" Batch {}: Migrated {} records in {}ms (Total for {}: {}/{})",
" Batch {}: Migrated {} glossary terms in {}ms (Total: {}/{})",
batchNumber,
batchCount,
executionTime,
tableName,
totalMigrated,
totalToMigrate);
Thread.sleep(100);
@ -206,108 +207,101 @@ public class MigrationUtil {
return totalMigrated;
}
private int migrateGlossaryTermStatus(Handle handle) {
LOG.info("Processing glossary_term_entity: migrating 'status' to 'entityStatus'");
private int migrateDataContractPostgresBatch(int totalToMigrate) throws InterruptedException {
int totalMigrated = 0;
int batchNumber = 0;
try {
String sql;
if (isPostgres) {
sql =
"UPDATE glossary_term_entity "
+ "SET json = jsonb_set(json - 'status', '{entityStatus}', "
+ "COALESCE(json->'status', '\"Approved\"'::jsonb)) "
+ "WHERE json ?? 'status' "
+ "AND NOT json ?? 'entityStatus'";
} else {
sql =
"UPDATE glossary_term_entity "
+ "SET json = JSON_SET(JSON_REMOVE(json, '$.status'), '$.entityStatus', "
+ "COALESCE(JSON_UNQUOTE(JSON_EXTRACT(json, '$.status')), 'Approved')) "
+ "WHERE JSON_CONTAINS_PATH(json, 'one', '$.status') = 1 "
+ "AND JSON_CONTAINS_PATH(json, 'one', '$.entityStatus') = 0";
}
while (totalMigrated < totalToMigrate) {
batchNumber++;
String updateSql =
String.format(
"WITH batch AS ( "
+ " SELECT id "
+ " FROM data_contract_entity "
+ " WHERE json ?? 'status' AND NOT json ?? 'entityStatus' "
+ " ORDER BY id "
+ " LIMIT %d "
+ ") "
+ "UPDATE data_contract_entity t "
+ "SET json = jsonb_set(t.json - 'status', '{entityStatus}', "
+ "CASE "
+ " WHEN t.json->>'status' = 'Active' THEN '\"Approved\"'::jsonb "
+ " ELSE COALESCE(t.json->'status', '\"Approved\"'::jsonb) "
+ "END) "
+ "FROM batch "
+ "WHERE t.id = batch.id "
+ " AND t.json ?? 'status' AND NOT t.json ?? 'entityStatus'",
BATCH_SIZE);
long startTime = System.currentTimeMillis();
totalMigrated = handle.createUpdate(sql).execute();
int batchCount = handle.createUpdate(updateSql).execute();
long executionTime = System.currentTimeMillis() - startTime;
if (totalMigrated > 0) {
if (batchCount > 0) {
totalMigrated += batchCount;
LOG.info(
"✓ Completed glossary_term_entity: {} records migrated from 'status' to 'entityStatus' in {}ms",
" Batch {}: Migrated {} data contracts in {}ms (Total: {}/{})",
batchNumber,
batchCount,
executionTime,
totalMigrated,
executionTime);
totalToMigrate);
Thread.sleep(100);
} else {
LOG.info("✓ Completed glossary_term_entity: No records needed migration");
break;
}
} catch (Exception e) {
LOG.error("✗ FAILED migrating glossary_term_entity status: {}", e.getMessage(), e);
}
return totalMigrated;
}
private int migrateDataContractStatus(Handle handle) {
LOG.info(
"Processing data_contract_entity: migrating 'status' to 'entityStatus' and 'Active' to 'Approved'");
private int migrateDataContractMySQLBatch(int totalToMigrate) throws InterruptedException {
int totalMigrated = 0;
int batchNumber = 0;
try {
String sql;
if (isPostgres) {
// PostgreSQL: Rename status to entityStatus and convert Active to Approved
sql =
"UPDATE data_contract_entity "
+ "SET json = jsonb_set(json - 'status', '{entityStatus}', "
+ "CASE "
+ " WHEN json->>'status' = 'Active' THEN '\"Approved\"'::jsonb "
+ " ELSE COALESCE(json->'status', '\"Approved\"'::jsonb) "
+ "END) "
+ "WHERE json ?? 'status' "
+ "AND NOT json ?? 'entityStatus'";
} else {
// MySQL: Rename status to entityStatus and convert Active to Approved
sql =
"UPDATE data_contract_entity "
+ "SET json = JSON_SET(JSON_REMOVE(json, '$.status'), '$.entityStatus', "
+ "CASE "
+ " WHEN JSON_UNQUOTE(JSON_EXTRACT(json, '$.status')) = 'Active' THEN 'Approved' "
+ " ELSE COALESCE(JSON_UNQUOTE(JSON_EXTRACT(json, '$.status')), 'Approved') "
+ "END) "
+ "WHERE JSON_CONTAINS_PATH(json, 'one', '$.status') = 1 "
+ "AND JSON_CONTAINS_PATH(json, 'one', '$.entityStatus') = 0";
}
while (totalMigrated < totalToMigrate) {
batchNumber++;
String updateSql =
String.format(
"UPDATE data_contract_entity t "
+ "JOIN ( "
+ " SELECT id "
+ " FROM data_contract_entity "
+ " WHERE JSON_CONTAINS_PATH(json, 'one', '$.status') = 1 "
+ " AND JSON_CONTAINS_PATH(json, 'one', '$.entityStatus') = 0 "
+ " ORDER BY id "
+ " LIMIT %d "
+ ") s ON t.id = s.id "
+ "SET t.json = JSON_SET(JSON_REMOVE(t.json, '$.status'), '$.entityStatus', "
+ "CASE "
+ " WHEN JSON_UNQUOTE(JSON_EXTRACT(t.json, '$.status')) = 'Active' THEN 'Approved' "
+ " ELSE COALESCE(JSON_UNQUOTE(JSON_EXTRACT(t.json, '$.status')), 'Approved') "
+ "END) "
+ "WHERE JSON_CONTAINS_PATH(t.json, 'one', '$.status') = 1 "
+ " AND JSON_CONTAINS_PATH(t.json, 'one', '$.entityStatus') = 0",
BATCH_SIZE);
long startTime = System.currentTimeMillis();
totalMigrated = handle.createUpdate(sql).execute();
int batchCount = handle.createUpdate(updateSql).execute();
long executionTime = System.currentTimeMillis() - startTime;
if (totalMigrated > 0) {
if (batchCount > 0) {
totalMigrated += batchCount;
LOG.info(
"✓ Completed data_contract_entity: {} records migrated from 'status' to 'entityStatus' in {}ms",
" Batch {}: Migrated {} data contracts in {}ms (Total: {}/{})",
batchNumber,
batchCount,
executionTime,
totalMigrated,
executionTime);
totalToMigrate);
Thread.sleep(100);
} else {
LOG.info("✓ Completed data_contract_entity: No records needed migration");
break;
}
} catch (Exception e) {
LOG.error("✗ FAILED migrating data_contract_entity status: {}", e.getMessage(), e);
}
return totalMigrated;
}
private String buildCountQuery(String tableName) {
if (isPostgres) {
return String.format(
"SELECT COUNT(*) FROM %s " + "WHERE NOT (json ?? 'entityStatus')", tableName);
} else {
return String.format(
"SELECT COUNT(*) FROM %s " + "WHERE JSON_EXTRACT(json, '$.entityStatus') IS NULL",
tableName);
}
}
}

View File

@ -1627,7 +1627,7 @@ public class DataContractResourceTest extends EntityResourceTest<DataContract, C
CreateDataContract create =
createDataContractRequest(test.getDisplayName(), table).withEntityStatus(null);
DataContract created = createDataContract(create);
assertNull(created.getEntityStatus());
assertEquals(EntityStatus.UNPROCESSED, created.getEntityStatus());
String originalJson = JsonUtils.pojoToJson(created);
created.setEntityStatus(EntityStatus.APPROVED);

View File

@ -600,18 +600,18 @@ public class DataProductResourceTest extends EntityResourceTest<DataProduct, Cre
CreateDataProduct createDataProduct = createRequest(getEntityName(test));
DataProduct dataProduct = createEntity(createDataProduct, ADMIN_AUTH_HEADERS);
// Verify the data product is created with APPROVED status
// Verify the data product is created with UNPROCESSED status
assertEquals(
EntityStatus.APPROVED,
EntityStatus.UNPROCESSED,
dataProduct.getEntityStatus(),
"DataProduct should be created with APPROVED status");
"DataProduct should be created with UNPROCESSED status");
// Update the entityStatus using PATCH operation
String originalJson = JsonUtils.pojoToJson(dataProduct);
dataProduct.setEntityStatus(EntityStatus.IN_REVIEW);
ChangeDescription change = getChangeDescription(dataProduct, MINOR_UPDATE);
fieldUpdated(change, "entityStatus", EntityStatus.APPROVED, EntityStatus.IN_REVIEW);
fieldUpdated(change, "entityStatus", EntityStatus.UNPROCESSED, EntityStatus.IN_REVIEW);
DataProduct updatedDataProduct =
patchEntityAndCheck(dataProduct, originalJson, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change);

View File

@ -4317,22 +4317,22 @@ public class TestCaseResourceTest extends EntityResourceTest<TestCase, CreateTes
@Test
void test_entityStatusUpdateAndPatch(TestInfo test) throws IOException {
// Create a test case with APPROVED status by default
// Create a test case with UNPROCESSED status by default
CreateTestCase createTestCase = createRequest(getEntityName(test));
TestCase testCase = createEntity(createTestCase, ADMIN_AUTH_HEADERS);
// Verify the test case is created with APPROVED status
// Verify the test case is created with UNPROCESSED status
assertEquals(
EntityStatus.APPROVED,
EntityStatus.UNPROCESSED,
testCase.getEntityStatus(),
"TestCase should be created with APPROVED status");
"TestCase should be created with UNPROCESSED status");
// Update the entityStatus using PATCH operation
String originalJson = JsonUtils.pojoToJson(testCase);
testCase.setEntityStatus(EntityStatus.IN_REVIEW);
ChangeDescription change = getChangeDescription(testCase, MINOR_UPDATE);
fieldUpdated(change, "entityStatus", EntityStatus.APPROVED, EntityStatus.IN_REVIEW);
fieldUpdated(change, "entityStatus", EntityStatus.UNPROCESSED, EntityStatus.IN_REVIEW);
TestCase updatedTestCase =
patchEntityAndCheck(testCase, originalJson, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change);
@ -4426,9 +4426,9 @@ public class TestCaseResourceTest extends EntityResourceTest<TestCase, CreateTes
// Verify initial state
assertEquals(
EntityStatus.APPROVED,
EntityStatus.UNPROCESSED,
testCase.getEntityStatus(),
"TestCase should be created with APPROVED status");
"TestCase should be created with UNPROCESSED status");
assertTrue(nullOrEmpty(testCase.getReviewers()), "TestCase should have no reviewers initially");
// Add reviewers using PATCH
@ -4448,7 +4448,7 @@ public class TestCaseResourceTest extends EntityResourceTest<TestCase, CreateTes
origJson = JsonUtils.pojoToJson(testCase);
testCase.withEntityStatus(EntityStatus.IN_REVIEW);
change = getChangeDescription(testCase, MINOR_UPDATE);
fieldUpdated(change, "entityStatus", EntityStatus.APPROVED, EntityStatus.IN_REVIEW);
fieldUpdated(change, "entityStatus", EntityStatus.UNPROCESSED, EntityStatus.IN_REVIEW);
testCase = patchEntityAndCheck(testCase, origJson, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change);
// Verify entityStatus was updated
@ -4474,16 +4474,16 @@ public class TestCaseResourceTest extends EntityResourceTest<TestCase, CreateTes
// Test clearing reviewers and updating status back to APPROVED
origJson = JsonUtils.pojoToJson(testCase);
testCase.withEntityStatus(EntityStatus.APPROVED).withReviewers(null);
testCase.withEntityStatus(EntityStatus.UNPROCESSED).withReviewers(null);
change = getChangeDescription(testCase, MINOR_UPDATE);
fieldUpdated(change, "entityStatus", EntityStatus.IN_REVIEW, EntityStatus.APPROVED);
fieldUpdated(change, "entityStatus", EntityStatus.IN_REVIEW, EntityStatus.UNPROCESSED);
fieldDeleted(change, "reviewers", List.of(USER2.getEntityReference()));
testCase = patchEntity(testCase.getId(), origJson, testCase, ADMIN_AUTH_HEADERS, null);
// Verify final state
assertNotNull(testCase);
assertEquals(
EntityStatus.APPROVED,
EntityStatus.UNPROCESSED,
testCase.getEntityStatus(),
"TestCase should be updated to APPROVED status");
assertTrue(

View File

@ -247,18 +247,18 @@ public class ClassificationResourceTest
CreateClassification createClassification = createRequest(getEntityName(test));
Classification classification = createEntity(createClassification, ADMIN_AUTH_HEADERS);
// Verify the classification is created with APPROVED status
// Verify the classification is created with UNPROCESSED status
assertEquals(
EntityStatus.APPROVED,
EntityStatus.UNPROCESSED,
classification.getEntityStatus(),
"Classification should be created with APPROVED status");
"Classification should be created with UNPROCESSED status");
// Update the entityStatus using PATCH operation
String originalJson = JsonUtils.pojoToJson(classification);
classification.setEntityStatus(EntityStatus.IN_REVIEW);
ChangeDescription change = getChangeDescription(classification, MINOR_UPDATE);
fieldUpdated(change, "entityStatus", EntityStatus.APPROVED, EntityStatus.IN_REVIEW);
fieldUpdated(change, "entityStatus", EntityStatus.UNPROCESSED, EntityStatus.IN_REVIEW);
Classification updatedClassification =
patchEntityAndCheck(classification, originalJson, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change);

View File

@ -742,16 +742,18 @@ public class TagResourceTest extends EntityResourceTest<Tag, CreateTag> {
CreateTag createTag = createRequest(getEntityName(test));
Tag tag = createEntity(createTag, ADMIN_AUTH_HEADERS);
// Verify the tag is created with APPROVED status
// Verify the tag is created with UNPROCESSED status
assertEquals(
EntityStatus.APPROVED, tag.getEntityStatus(), "Tag should be created with APPROVED status");
EntityStatus.UNPROCESSED,
tag.getEntityStatus(),
"Tag should be created with UNPROCESSED status");
// Update the entityStatus using PATCH operation
String originalJson = JsonUtils.pojoToJson(tag);
tag.setEntityStatus(EntityStatus.IN_REVIEW);
ChangeDescription change = getChangeDescription(tag, MINOR_UPDATE);
fieldUpdated(change, "entityStatus", EntityStatus.APPROVED, EntityStatus.IN_REVIEW);
fieldUpdated(change, "entityStatus", EntityStatus.UNPROCESSED, EntityStatus.IN_REVIEW);
Tag updatedTag =
patchEntityAndCheck(tag, originalJson, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change);

View File

@ -10,6 +10,8 @@
"In Review",
"Approved",
"Deprecated",
"Rejected"
]
"Rejected",
"Unprocessed"
],
"default": "Unprocessed"
}

View File

@ -149,6 +149,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -150,6 +150,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
export interface InputNamespaceMap {

View File

@ -237,6 +237,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -260,6 +260,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -362,6 +362,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -379,6 +379,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -391,6 +391,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -814,6 +814,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -387,6 +387,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -784,6 +784,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -303,6 +303,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -505,6 +505,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -501,6 +501,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -407,6 +407,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -420,6 +420,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -250,6 +250,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -287,6 +287,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -361,6 +361,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -397,6 +397,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -396,6 +396,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -272,6 +272,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -228,6 +228,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -373,6 +373,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -388,6 +388,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -904,6 +904,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -408,6 +408,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -791,6 +791,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -259,6 +259,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -53,6 +53,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
export interface InputNamespaceMap {

View File

@ -294,6 +294,7 @@ export enum EntityStatus {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}
/**

View File

@ -20,4 +20,5 @@ export enum Status {
Draft = "Draft",
InReview = "In Review",
Rejected = "Rejected",
Unprocessed = "Unprocessed",
}