Fix #15341 - Test Case reference as inherited field for Test Case Incident (#16027)

* fix: switch unique test computation to use scalar_subquery

* fix: make test case reference an inherited field

* style: ran java linting

* fix: added test case resolution migration

* style: ran java linting
This commit is contained in:
Teddy 2024-04-25 17:31:11 +02:00 committed by GitHub
parent e8d9490b0e
commit 4ed87a4d08
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
19 changed files with 492 additions and 44 deletions

View File

@ -63,7 +63,7 @@ class ColumnValuesToBeUniqueValidator(
) # type: ignore
try:
self.value = dict(self.runner.dispatch_query_select_first(count, unique_count.subquery("uniqueCount"))) # type: ignore
self.value = dict(self.runner.dispatch_query_select_first(count, unique_count.scalar_subquery().label("uniqueCount"))) # type: ignore
res = self.value.get(Metrics.COUNT.name)
except Exception as exc:
raise SQLAlchemyError(exc)

View File

@ -20,6 +20,7 @@ from .database_test_container import DataBaseTestContainer
class MySQLTestContainer(DataBaseTestContainer):
    """Test container wrapper that boots a ``mysql:latest`` Docker image."""

    def __init__(self):
        """Start the MySQL container and capture its connection URL.

        TC_POOLING_INTERVAL=3 presumably shortens testcontainers' readiness
        polling interval so startup is detected sooner -- TODO confirm intent.
        """
        self.mysql_container = MySqlContainer("mysql:latest")
        self.mysql_container.with_env("TC_POOLING_INTERVAL", "3")
        # start() must run before the connection URL can be resolved
        self.start()
        self.connection_url = self.mysql_container.get_connection_url()
        super().__init__()

View File

@ -20,6 +20,7 @@ from .database_test_container import DataBaseTestContainer
class PostgresTestContainer(DataBaseTestContainer):
    """Test container wrapper that boots a ``postgres:latest`` Docker image."""

    def __init__(self):
        """Start the Postgres container and capture its connection URL.

        TC_POOLING_INTERVAL=3 presumably shortens testcontainers' readiness
        polling interval so startup is detected sooner -- TODO confirm intent.
        """
        self.postgres_container = PostgresContainer("postgres:latest")
        self.postgres_container.with_env("TC_POOLING_INTERVAL", "3")
        # start() must run before the connection URL can be resolved
        self.start()
        self.connection_url = self.postgres_container.get_connection_url()
        super().__init__()

View File

@ -1081,7 +1081,13 @@ public abstract class EntityRepository<T extends EntityInterface> {
throw new IllegalArgumentException(CatalogExceptionMessage.entityIsNotEmpty(entityType));
}
// Delete all the contained entities
for (EntityRelationshipRecord entityRelationshipRecord : childrenRecords) {
deleteChildren(childrenRecords, hardDelete, updatedBy);
}
@Transaction
protected void deleteChildren(
List<EntityRelationshipRecord> children, boolean hardDelete, String updatedBy) {
for (EntityRelationshipRecord entityRelationshipRecord : children) {
LOG.info(
"Recursively {} deleting {} {}",
hardDelete ? "hard" : "soft",
@ -1581,7 +1587,7 @@ public abstract class EntityRepository<T extends EntityInterface> {
: null;
}
public final void ensureSingleRelationship(
public static void ensureSingleRelationship(
String entityType,
UUID id,
List<EntityRelationshipRecord> relations,

View File

@ -235,6 +235,13 @@ public interface EntityTimeSeriesDAO {
return getById(getTimeSeriesTableName(), id.toString());
}
/** Deletes the time series record with the given id from {@code table}. */
@SqlUpdate(value = "DELETE from <table> WHERE id = :id")
void deleteById(@Define("table") String table, @Bind("id") String id);

/** Convenience overload targeting this DAO's configured time series table. */
default void deleteById(UUID id) {
  deleteById(getTimeSeriesTableName(), id.toString());
}
/** @deprecated */
@SqlQuery("SELECT COUNT(DISTINCT entityFQN) FROM <table>")
@Deprecated(since = "1.1.1")

View File

@ -1,11 +1,15 @@
package org.openmetadata.service.jdbi3;
import static org.openmetadata.schema.type.Include.ALL;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import lombok.Getter;
import org.jdbi.v3.sqlobject.transaction.Transaction;
import org.openmetadata.schema.EntityTimeSeriesInterface;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.Relationship;
import org.openmetadata.service.Entity;
import org.openmetadata.service.search.SearchRepository;
import org.openmetadata.service.util.JsonUtils;
@ -38,21 +42,91 @@ public abstract class EntityTimeSeriesRepository<T extends EntityTimeSeriesInter
@Transaction
public T createNewRecord(T recordEntity, String extension, String recordFQN) {
recordEntity.setId(UUID.randomUUID());
timeSeriesDao.insert(recordFQN, extension, entityType, JsonUtils.pojoToJson(recordEntity));
storeInternal(recordEntity, recordFQN, extension);
storeRelationshipInternal(recordEntity);
postCreate(recordEntity);
return recordEntity;
}
/**
 * Creates a new time series record (no schema extension): assigns a fresh id,
 * stores the JSON document, persists its relationships, and indexes it.
 *
 * @param recordEntity entity to persist; any pre-set id is overwritten
 * @param recordFQN fully qualified name the record is stored under
 * @return the persisted entity, with its generated id set
 */
public T createNewRecord(T recordEntity, String recordFQN) {
  recordEntity.setId(UUID.randomUUID());
  storeInternal(recordEntity, recordFQN);
  storeRelationshipInternal(recordEntity);
  postCreate(recordEntity);
  return recordEntity;
}
@Transaction
public T createNewRecord(T recordEntity, String recordFQN) {
recordEntity.setId(UUID.randomUUID());
protected void storeInternal(T recordEntity, String recordFQN) {
timeSeriesDao.insert(recordFQN, entityType, JsonUtils.pojoToJson(recordEntity));
postCreate(recordEntity);
return recordEntity;
}
protected void postCreate(T entity) {
searchRepository.createTimeSeriesEntity(JsonUtils.deepCopy(entity, entityClass));
/** Persists the record JSON under the given FQN and schema extension. */
@Transaction
protected void storeInternal(T recordEntity, String recordFQN, String extension) {
  timeSeriesDao.insert(recordFQN, extension, entityType, JsonUtils.pojoToJson(recordEntity));
}
/** Single entry point through which relationship persistence is dispatched. */
protected void storeRelationshipInternal(T recordEntity) {
  storeRelationship(recordEntity);
}

/** Subclass hook: persist relationships for the record. */
protected void storeRelationship(T recordEntity) {
  // Nothing to do in the default implementation
}

/** Subclass hook: populate fields derived from relationships (inherited fields). */
protected void setInheritedFields(T recordEntity) {
  // Nothing to do in the default implementation
}
/**
 * Stores one relationship edge between two entities.
 *
 * <p>For bidirectional relationships only a single row is persisted: the pair
 * is normalized so the smaller UUID (per {@link UUID#compareTo}) is always the
 * "from" side, which prevents duplicate fromId-&gt;toId / toId-&gt;fromId rows.
 *
 * @param fromId source entity id
 * @param toId target entity id
 * @param fromEntity source entity type name
 * @param toEntity target entity type name
 * @param relationship relationship type; its ordinal is what gets stored
 * @param json optional relationship payload
 * @param bidirectional true when the edge has no direction semantics
 */
@Transaction
public final void addRelationship(
    UUID fromId,
    UUID toId,
    String fromEntity,
    String toEntity,
    Relationship relationship,
    String json,
    boolean bidirectional) {
  UUID from = fromId;
  UUID to = toId;
  if (bidirectional && fromId.compareTo(toId) > 0) {
    // For bidirectional relationship, instead of adding two row fromId -> toId and toId ->
    // fromId, just add one row where fromId is alphabetically less than toId
    from = toId;
    to = fromId;
  }
  daoCollection
      .relationshipDAO()
      .insert(from, to, fromEntity, toEntity, relationship.ordinal(), json);
}
/** Indexes the newly created record in the search store (works on a deep copy). */
protected void postCreate(T recordEntity) {
  searchRepository.createTimeSeriesEntity(JsonUtils.deepCopy(recordEntity, entityClass));
}

/** Removes the deleted record's document from the search store. */
protected void postDelete(T recordEntity) {
  searchRepository.deleteTimeSeriesEntityById(JsonUtils.deepCopy(recordEntity, entityClass));
}
/**
 * Lists relationship records that point at {@code toId}.
 *
 * @param fromEntityType origin entity type filter; null matches any origin type
 * @return matching relationship records (possibly empty)
 */
public final List<CollectionDAO.EntityRelationshipRecord> findFromRecords(
    UUID toId, String toEntityType, Relationship relationship, String fromEntityType) {
  // When fromEntityType is null, all the relationships from any entity is returned
  return fromEntityType == null
      ? daoCollection.relationshipDAO().findFrom(toId, toEntityType, relationship.ordinal())
      : daoCollection
          .relationshipDAO()
          .findFrom(toId, toEntityType, relationship.ordinal(), fromEntityType);
}
/**
 * Resolves the single {@code fromEntityType} entity related to {@code toId}.
 *
 * @param mustHaveRelationship when true, a missing relationship is treated as an
 *     error by {@code EntityRepository.ensureSingleRelationship}
 * @return the related entity reference (fetched with Include.ALL), or null when
 *     no relationship record exists
 */
protected EntityReference getFromEntityRef(
    UUID toId, Relationship relationship, String fromEntityType, boolean mustHaveRelationship) {
  List<CollectionDAO.EntityRelationshipRecord> records =
      findFromRecords(toId, entityType, relationship, fromEntityType);
  // Enforces that at most one (and, if required, exactly one) relation exists
  EntityRepository.ensureSingleRelationship(
      entityType, toId, records, relationship.value(), fromEntityType, mustHaveRelationship);
  return !records.isEmpty()
      ? Entity.getEntityReferenceById(records.get(0).getType(), records.get(0).getId(), ALL)
      : null;
}
public final ResultList<T> getResultList(
@ -90,8 +164,9 @@ public abstract class EntityTimeSeriesRepository<T extends EntityTimeSeriesInter
timeSeriesDao.listWithOffset(filter, limitParam, offsetInt, startTs, endTs, latest);
for (String json : jsons) {
T entity = JsonUtils.readValue(json, entityClass);
entityList.add(entity);
T recordEntity = JsonUtils.readValue(json, entityClass);
setInheritedFields(recordEntity);
entityList.add(recordEntity);
}
return getResultList(entityList, beforeOffset, afterOffset, total);
} else {
@ -109,7 +184,9 @@ public abstract class EntityTimeSeriesRepository<T extends EntityTimeSeriesInter
if (jsonRecord == null) {
return null;
}
return JsonUtils.readValue(jsonRecord, entityClass);
T entityRecord = JsonUtils.readValue(jsonRecord, entityClass);
setInheritedFields(entityRecord);
return entityRecord;
}
public T getById(UUID id) {
@ -117,6 +194,23 @@ public abstract class EntityTimeSeriesRepository<T extends EntityTimeSeriesInter
if (jsonRecord == null) {
return null;
}
return JsonUtils.readValue(jsonRecord, entityClass);
T entityRecord = JsonUtils.readValue(jsonRecord, entityClass);
setInheritedFields(entityRecord);
return entityRecord;
}
/**
 * Hard-deletes a time series record by id from both the data store and the
 * search index.
 *
 * @param id record id
 * @param hardDelete must be true; soft delete is a deliberate no-op here
 */
public void deleteById(UUID id, boolean hardDelete) {
  if (!hardDelete) {
    // time series entities by definition cannot be soft deleted (i.e. they do not have a state
    // and they should be immutable) though they can be contained inside entities that can be
    // soft deleted
    return;
  }
  T entityRecord = getById(id);
  if (entityRecord == null) {
    // nothing stored under this id
    return;
  }
  timeSeriesDao.deleteById(id);
  postDelete(entityRecord);
}
}

View File

@ -414,6 +414,20 @@ public class FeedRepository {
sortPosts(thread);
}
/**
 * Closes an open task thread directly, without running the task's resolution
 * workflow: the task is marked Closed, the thread is updated, and a closing
 * post is added.
 *
 * <p>No-op when the task is not in the Open state.
 */
@Transaction
public void closeTaskWithoutWorkflow(Thread thread, String user, CloseTask closeTask) {
  TaskDetails task = thread.getTask();
  if (task.getStatus() != Open) {
    return;
  }
  task.withStatus(TaskStatus.Closed).withClosedBy(user).withClosedAt(System.currentTimeMillis());
  thread.withTask(task).withUpdatedBy(user).withUpdatedAt(System.currentTimeMillis());
  dao.feedDAO().update(thread.getId(), JsonUtils.pojoToJson(thread));
  addClosingPost(thread, user, closeTask.getComment());
  sortPosts(thread);
}
private void storeMentions(Thread thread, String message) {
// Create relationship for users, teams, and other entities that are mentioned in the post
// Multiple mentions of the same entity is handled by taking distinct mentions

View File

@ -275,9 +275,6 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
}
}
// If we delete the test case, we need to clean up the resolution ts
daoCollection.testCaseResolutionStatusTimeSeriesDao().delete(test.getFullyQualifiedName());
deleteTestCaseFailedRowsSample(test.getId());
}
@ -372,13 +369,34 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
.withUpdatedAt(System.currentTimeMillis())
.withTestCaseReference(testCase.getEntityReference());
testCaseResolutionStatusRepository.createNewRecord(status, testCase.getFullyQualifiedName());
TestCaseResolutionStatus incident =
testCaseResolutionStatusRepository.createNewRecord(
status, testCase.getFullyQualifiedName());
testCaseResolutionStatusRepository.getLatestRecord(testCase.getFullyQualifiedName());
return incident.getStateId();
}
@Transaction
@Override
protected void deleteChildren(
List<CollectionDAO.EntityRelationshipRecord> children, boolean hardDelete, String updatedBy) {
if (hardDelete) {
for (CollectionDAO.EntityRelationshipRecord entityRelationshipRecord : children) {
LOG.info(
"Recursively {} deleting {} {}",
hardDelete ? "hard" : "soft",
entityRelationshipRecord.getType(),
entityRelationshipRecord.getId());
TestCaseResolutionStatusRepository testCaseResolutionStatusRepository =
(TestCaseResolutionStatusRepository)
Entity.getEntityTimeSeriesRepository(Entity.TEST_CASE_RESOLUTION_STATUS);
for (CollectionDAO.EntityRelationshipRecord child : children) {
testCaseResolutionStatusRepository.deleteById(child.getId(), hardDelete);
}
}
}
}
public RestUtil.PutResponse<TestCaseResult> deleteTestCaseResult(
String updatedBy, String fqn, Long timestamp) {
// Validate the request content
@ -778,12 +796,16 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
.withTestCaseReference(latestTestCaseResolutionStatus.getTestCaseReference())
.withUpdatedBy(user.getEntityReference());
EntityReference testCaseReference = testCaseResolutionStatus.getTestCaseReference();
testCaseResolutionStatus.setTestCaseReference(null);
Entity.getCollectionDAO()
.testCaseResolutionStatusTimeSeriesDao()
.insert(
testCaseResolutionStatus.getTestCaseReference().getFullyQualifiedName(),
testCaseReference.getFullyQualifiedName(),
Entity.TEST_CASE_RESOLUTION_STATUS,
JsonUtils.pojoToJson(testCaseResolutionStatus));
testCaseResolutionStatus.setTestCaseReference(testCaseReference);
testCaseResolutionStatusRepository.storeRelationship(testCaseResolutionStatus);
testCaseResolutionStatusRepository.postCreate(testCaseResolutionStatus);
// Return the TestCase with the StateId to avoid any unnecessary PATCH when resolving the task
@ -833,12 +855,16 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
.withTestCaseReference(latestTestCaseResolutionStatus.getTestCaseReference())
.withUpdatedBy(user.getEntityReference());
EntityReference testCaseReference = testCaseResolutionStatus.getTestCaseReference();
testCaseResolutionStatus.setTestCaseReference(null);
Entity.getCollectionDAO()
.testCaseResolutionStatusTimeSeriesDao()
.insert(
testCaseResolutionStatus.getTestCaseReference().getFullyQualifiedName(),
testCaseReference.getFullyQualifiedName(),
Entity.TEST_CASE_RESOLUTION_STATUS,
JsonUtils.pojoToJson(testCaseResolutionStatus));
testCaseResolutionStatus.setTestCaseReference(testCaseReference);
testCaseResolutionStatusRepository.storeRelationship(testCaseResolutionStatus);
testCaseResolutionStatusRepository.postCreate(testCaseResolutionStatus);
}
}

View File

@ -7,6 +7,7 @@ import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
@ -14,6 +15,7 @@ import javax.json.JsonPatch;
import javax.ws.rs.core.Response;
import org.jdbi.v3.sqlobject.transaction.Transaction;
import org.openmetadata.schema.EntityInterface;
import org.openmetadata.schema.api.feed.CloseTask;
import org.openmetadata.schema.api.feed.ResolveTask;
import org.openmetadata.schema.entity.feed.Thread;
import org.openmetadata.schema.entity.teams.User;
@ -25,6 +27,7 @@ import org.openmetadata.schema.tests.type.TestCaseResolutionStatus;
import org.openmetadata.schema.tests.type.TestCaseResolutionStatusTypes;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.Relationship;
import org.openmetadata.schema.type.TaskDetails;
import org.openmetadata.schema.type.TaskStatus;
import org.openmetadata.schema.type.TaskType;
@ -53,11 +56,17 @@ public class TestCaseResolutionStatusRepository
public ResultList<TestCaseResolutionStatus> listTestCaseResolutionStatusesForStateId(
UUID stateId) {
List<TestCaseResolutionStatus> testCaseResolutionStatuses = new ArrayList<>();
List<String> jsons =
((CollectionDAO.TestCaseResolutionStatusTimeSeriesDAO) timeSeriesDao)
.listTestCaseResolutionStatusesForStateId(stateId.toString());
List<TestCaseResolutionStatus> testCaseResolutionStatuses =
JsonUtils.readObjects(jsons, TestCaseResolutionStatus.class);
for (String json : jsons) {
TestCaseResolutionStatus testCaseResolutionStatus =
JsonUtils.readValue(json, TestCaseResolutionStatus.class);
setInheritedFields(testCaseResolutionStatus);
testCaseResolutionStatuses.add(testCaseResolutionStatus);
}
return getResultList(testCaseResolutionStatuses, null, null, testCaseResolutionStatuses.size());
}
@ -145,11 +154,9 @@ public class TestCaseResolutionStatusRepository
@Override
@Transaction
public TestCaseResolutionStatus createNewRecord(
TestCaseResolutionStatus recordEntity, String recordFQN) {
public void storeInternal(TestCaseResolutionStatus recordEntity, String recordFQN) {
TestCaseResolutionStatus lastIncident =
getLatestRecord(recordEntity.getTestCaseReference().getFullyQualifiedName());
TestCaseResolutionStatus lastIncident = getLatestRecord(recordFQN);
if (recordEntity.getStateId() == null) {
recordEntity.setStateId(UUID.randomUUID());
@ -177,7 +184,7 @@ public class TestCaseResolutionStatusRepository
case New -> {
// If there is already an existing New incident we'll return it
if (Boolean.TRUE.equals(unresolvedIncident(lastIncident))) {
return lastIncident;
return;
}
}
case Ack, Assigned -> openOrAssignTask(recordEntity);
@ -187,12 +194,33 @@ public class TestCaseResolutionStatusRepository
// We don't create a new record. The new status will be added via the
// TestCaseFailureResolutionTaskWorkflow
// implemented in the TestCaseRepository.
return getLatestRecord(recordEntity.getTestCaseReference().getFullyQualifiedName());
return;
}
default -> throw new IllegalArgumentException(
String.format("Invalid status %s", recordEntity.getTestCaseResolutionStatusType()));
}
return super.createNewRecord(recordEntity, recordFQN);
EntityReference testCaseReference = recordEntity.getTestCaseReference();
recordEntity.withTestCaseReference(null); // we don't want to store the reference in the record
super.storeInternal(recordEntity, recordFQN);
recordEntity.withTestCaseReference(testCaseReference);
}
/**
 * Links the incident to its parent test case via a PARENT_OF relationship —
 * this relationship replaces the previously embedded testCaseReference field.
 */
@Override
protected void storeRelationship(TestCaseResolutionStatus recordEntity) {
  addRelationship(
      recordEntity.getTestCaseReference().getId(),
      recordEntity.getId(),
      Entity.TEST_CASE,
      Entity.TEST_CASE_RESOLUTION_STATUS,
      Relationship.PARENT_OF,
      null,
      false);
}

/**
 * Rehydrates testCaseReference from the stored PARENT_OF relationship; the
 * relationship is mandatory (mustHaveRelationship = true).
 */
@Override
protected void setInheritedFields(TestCaseResolutionStatus recordEntity) {
  recordEntity.setTestCaseReference(
      getFromEntityRef(recordEntity.getId(), Relationship.PARENT_OF, Entity.TEST_CASE, true));
}
private void openOrAssignTask(TestCaseResolutionStatus incidentStatus) {
@ -253,17 +281,20 @@ public class TestCaseResolutionStatusRepository
Thread thread = getIncidentTask(lastIncidentStatus);
if (thread != null) {
// If there is an existing task, we'll resolve it and create a new incident
// status with the Resolved status flow
// If there is an existing task, we'll close it without performing the workflow
// (i.e. creating a new incident which will be handled here).
FeedRepository.ThreadContext threadContext = new FeedRepository.ThreadContext(thread);
threadContext.getThread().getTask().withNewValue(resolveTask.getNewValue());
Entity.getFeedRepository()
.resolveTask(
new FeedRepository.ThreadContext(thread),
updatedBy.getFullyQualifiedName(),
resolveTask);
} else {
// if there is no task, we'll simply create a new incident status (e.g. New -> Resolved)
super.createNewRecord(newIncidentStatus, testCase.getFullyQualifiedName());
.closeTaskWithoutWorkflow(
threadContext.getThread(), updatedBy.getFullyQualifiedName(), new CloseTask());
}
// if there is no task, we'll simply create a new incident status (e.g. New -> Resolved)
EntityReference testCaseReference = newIncidentStatus.getTestCaseReference();
newIncidentStatus.setTestCaseReference(
null); // we don't want to store the reference in the record
super.storeInternal(newIncidentStatus, testCase.getFullyQualifiedName());
newIncidentStatus.setTestCaseReference(testCaseReference);
}
private void createTask(

View File

@ -2,6 +2,7 @@ package org.openmetadata.service.migration.mysql.v140;
import static org.openmetadata.service.migration.utils.v140.MigrationUtil.migrateGenericToWebhook;
import static org.openmetadata.service.migration.utils.v140.MigrationUtil.migrateTablePartition;
import static org.openmetadata.service.migration.utils.v140.MigrationUtil.migrateTestCaseResolution;
import lombok.SneakyThrows;
import org.jdbi.v3.core.Handle;
@ -27,9 +28,13 @@ public class Migration extends MigrationProcessImpl {
/** Runs the v1.4.0 data migrations (MySQL variant). */
@Override
@SneakyThrows
public void runDataMigration() {
  // Migrate Table Partition
  migrateTablePartition(handle, collectionDAO);

  // Migrate Generic to Webhook
  migrateGenericToWebhook(collectionDAO);

  // Migrate Test case resolution status: move the embedded test case reference
  // into an entity relationship (see MigrationUtil.migrateTestCaseResolution)
  migrateTestCaseResolution(handle, collectionDAO);
}
}

View File

@ -2,6 +2,7 @@ package org.openmetadata.service.migration.postgres.v140;
import static org.openmetadata.service.migration.utils.v140.MigrationUtil.migrateGenericToWebhook;
import static org.openmetadata.service.migration.utils.v140.MigrationUtil.migrateTablePartition;
import static org.openmetadata.service.migration.utils.v140.MigrationUtil.migrateTestCaseResolution;
import lombok.SneakyThrows;
import org.jdbi.v3.core.Handle;
@ -27,9 +28,13 @@ public class Migration extends MigrationProcessImpl {
/** Runs the v1.4.0 data migrations (Postgres variant). */
@Override
@SneakyThrows
public void runDataMigration() {
  // Migrate Table Partition
  migrateTablePartition(handle, collectionDAO);

  // Migrate Generic to Webhook
  migrateGenericToWebhook(collectionDAO);

  // Migrate Test case resolution status: move the embedded test case reference
  // into an entity relationship (see MigrationUtil.migrateTestCaseResolution)
  migrateTestCaseResolution(handle, collectionDAO);
}
}

View File

@ -3,6 +3,7 @@ package org.openmetadata.service.migration.utils.v140;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonArrayBuilder;
@ -16,9 +17,12 @@ import org.json.JSONObject;
import org.openmetadata.schema.api.services.CreateDatabaseService;
import org.openmetadata.schema.entity.data.Table;
import org.openmetadata.schema.entity.events.EventSubscription;
import org.openmetadata.schema.tests.type.TestCaseResolutionStatus;
import org.openmetadata.schema.type.PartitionColumnDetails;
import org.openmetadata.schema.type.PartitionIntervalTypes;
import org.openmetadata.schema.type.Relationship;
import org.openmetadata.schema.type.TablePartition;
import org.openmetadata.service.Entity;
import org.openmetadata.service.jdbi3.CollectionDAO;
import org.openmetadata.service.resources.databases.DatasourceConfig;
import org.openmetadata.service.util.JsonUtils;
@ -34,6 +38,13 @@ public class MigrationUtil {
private static final String POSTGRES_QUERY_TABLES_WITH_PARTITION =
"SELECT json " + "FROM table_entity " + "WHERE json->'tablePartition' IS NOT NULL";
private static final String TEST_CASE_RESOLUTION_QUERY =
"SELECT json FROM test_case_resolution_status_time_series";
private static final String MYSQL_TEST_CASE_RESOLUTION_UPDATE_QUERY =
"UPDATE test_case_resolution_status_time_series SET json = :json WHERE id = :id";
private static final String POSTGRES_TEST_CASE_RESOLUTION_UPDATE_QUERY =
"UPDATE test_case_resolution_status_time_series SET json = :json::jsonb WHERE id = :id";
private MigrationUtil() {
/* Cannot create object util class*/
}
@ -64,6 +75,50 @@ public class MigrationUtil {
}
}
/**
 * v1.4.0 migration: for every stored TestCaseResolutionStatus, turns the
 * embedded testCaseReference into a testCase -&gt; incident PARENT_OF
 * relationship row, then rewrites the stored JSON without the reference (it
 * becomes an inherited field resolved at read time).
 *
 * <p>Best-effort: a failure on an individual row is logged and skipped so one
 * bad record does not abort the whole migration.
 */
public static void migrateTestCaseResolution(Handle handle, CollectionDAO collectionDAO) {
  try {
    handle
        .createQuery(TEST_CASE_RESOLUTION_QUERY)
        .mapToMap()
        .forEach(
            row -> {
              try {
                TestCaseResolutionStatus testCaseResolutionStatus =
                    JsonUtils.readValue(
                        row.get("json").toString(), TestCaseResolutionStatus.class);
                UUID fromId = testCaseResolutionStatus.getTestCaseReference().getId();
                UUID toId = testCaseResolutionStatus.getId();

                // Store the test case <-> incident relationship
                collectionDAO
                    .relationshipDAO()
                    .insert(
                        fromId,
                        toId,
                        Entity.TEST_CASE,
                        Entity.TEST_CASE_RESOLUTION_STATUS,
                        Relationship.PARENT_OF.ordinal(),
                        null);

                // Remove the test case reference from the test case resolution status
                testCaseResolutionStatus.setTestCaseReference(null);
                String json = JsonUtils.pojoToJson(testCaseResolutionStatus);
                // Postgres needs an explicit ::jsonb cast on the bound JSON value
                String updateQuery = MYSQL_TEST_CASE_RESOLUTION_UPDATE_QUERY;
                if (Boolean.FALSE.equals(DatasourceConfig.getInstance().isMySQL())) {
                  updateQuery = POSTGRES_TEST_CASE_RESOLUTION_UPDATE_QUERY;
                }
                handle
                    .createUpdate(updateQuery)
                    .bind("json", json)
                    .bind("id", toId.toString())
                    .execute();
              } catch (Exception ex) {
                LOG.warn("Error during the test case resolution migration due to ", ex);
              }
            });
  } catch (Exception ex) {
    LOG.warn("Error running the test case resolution migration ", ex);
  }
}
public static void migrateTablePartition(Handle handle, CollectionDAO collectionDAO) {
try {
if (Boolean.TRUE.equals(DatasourceConfig.getInstance().isMySQL())) {

View File

@ -737,6 +737,11 @@ public class TestCaseResource extends EntityResource<TestCase, TestCaseRepositor
@QueryParam("hardDelete")
@DefaultValue("false")
boolean hardDelete,
@Parameter(
description = "Recursively delete this entity and it's children. (Default `false`)")
@DefaultValue("false")
@QueryParam("recursive")
boolean recursive,
@Parameter(description = "Id of the test case", schema = @Schema(type = "UUID"))
@PathParam("id")
UUID id) {
@ -746,7 +751,7 @@ public class TestCaseResource extends EntityResource<TestCase, TestCaseRepositor
OperationContext operationContext =
new OperationContext(Entity.TABLE, MetadataOperation.EDIT_TESTS);
authorizer.authorize(securityContext, operationContext, resourceContext);
return delete(uriInfo, securityContext, id, false, hardDelete);
return delete(uriInfo, securityContext, id, recursive, hardDelete);
}
@DELETE
@ -766,12 +771,17 @@ public class TestCaseResource extends EntityResource<TestCase, TestCaseRepositor
@QueryParam("hardDelete")
@DefaultValue("false")
boolean hardDelete,
@Parameter(
description = "Recursively delete this entity and it's children. (Default `false`)")
@DefaultValue("false")
@QueryParam("recursive")
boolean recursive,
@Parameter(
description = "Fully qualified name of the test case",
schema = @Schema(type = "string"))
@PathParam("fqn")
String fqn) {
return deleteByName(uriInfo, securityContext, fqn, false, hardDelete);
return deleteByName(uriInfo, securityContext, fqn, recursive, hardDelete);
}
@DELETE

View File

@ -502,6 +502,26 @@ public class SearchRepository {
}
}
/**
 * Removes a time series entity's document from its search index.
 *
 * <p>Best-effort: search client failures are logged, never propagated, so a
 * search outage cannot fail the originating delete. Null entities are ignored.
 */
public void deleteTimeSeriesEntityById(EntityTimeSeriesInterface entity) {
  if (entity != null) {
    String entityId = entity.getId().toString();
    String entityType = entity.getEntityReference().getType();
    // NOTE(review): assumes every time series entityType has an index mapping;
    // a missing mapping would NPE inside the try and be logged — confirm.
    IndexMapping indexMapping = entityIndexMap.get(entityType);
    try {
      searchClient.deleteEntity(indexMapping.getIndexName(clusterAlias), entityId);
    } catch (Exception ie) {
      LOG.error(
          String.format(
              "Issue in Deleting the search document for entityID [%s] and entityType [%s]. Reason[%s], Cause[%s], Stack [%s]",
              entityId,
              entityType,
              ie.getMessage(),
              ie.getCause(),
              ExceptionUtils.getStackTrace(ie)));
    }
  }
}
public void softDeleteOrRestoreEntity(EntityInterface entity, boolean delete) {
if (entity != null) {
String entityId = entity.getId().toString();

View File

@ -191,7 +191,58 @@
}
},
"updatedBy": {
"type": "text"
"properties": {
"id": {
"type": "keyword",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 36
}
}
},
"type": {
"type": "keyword"
},
"name": {
"type": "keyword",
"normalizer": "lowercase_normalizer",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"displayName": {
"type": "keyword",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"fullyQualifiedName": {
"type": "text"
},
"description": {
"type": "text",
"analyzer": "om_analyzer",
"fields": {
"keyword": {
"type": "keyword",
"normalizer": "lowercase_normalizer"
}
}
},
"deleted": {
"type": "text"
},
"href": {
"type": "text"
}
}
},
"updatedAt": {
"type": "date"

View File

@ -205,7 +205,58 @@
}
},
"updatedBy": {
"type": "text"
"properties": {
"id": {
"type": "keyword",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 36
}
}
},
"type": {
"type": "keyword"
},
"name": {
"type": "keyword",
"normalizer": "lowercase_normalizer",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"displayName": {
"type": "keyword",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"fullyQualifiedName": {
"type": "text"
},
"description": {
"type": "text",
"analyzer": "om_analyzer",
"fields": {
"keyword": {
"type": "keyword",
"normalizer": "lowercase_normalizer"
}
}
},
"deleted": {
"type": "text"
},
"href": {
"type": "text"
}
}
},
"updatedAt": {
"type": "date"

View File

@ -195,7 +195,58 @@
}
},
"updatedBy": {
"type": "text"
"properties": {
"id": {
"type": "keyword",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 36
}
}
},
"type": {
"type": "keyword"
},
"name": {
"type": "keyword",
"normalizer": "lowercase_normalizer",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"displayName": {
"type": "keyword",
"fields": {
"keyword": {
"type": "keyword",
"ignore_above": 256
}
}
},
"fullyQualifiedName": {
"type": "text"
},
"description": {
"type": "text",
"analyzer": "om_analyzer",
"fields": {
"keyword": {
"type": "keyword",
"normalizer": "lowercase_normalizer"
}
}
},
"deleted": {
"type": "text"
},
"href": {
"type": "text"
}
}
},
"updatedAt": {
"type": "date"

View File

@ -1333,6 +1333,25 @@ public class TestCaseResourceTest extends EntityResourceTest<TestCase, CreateTes
assertEquals(
TestCaseResolutionStatusTypes.Ack,
storedTestCaseResolutions.getData().get(0).getTestCaseResolutionStatusType());
// Delete test case recursively and check that the test case resolution status is also deleted
// 1. soft delete - should not delete the test case resolution status
// 2. hard delete - should delete the test case resolution status
deleteEntity(testCaseEntity1.getId(), true, false, ADMIN_AUTH_HEADERS);
storedTestCaseResolutions =
getTestCaseFailureStatus(startTs, endTs, null, TestCaseResolutionStatusTypes.Ack);
assertEquals(2, storedTestCaseResolutions.getData().size());
assertTrue(
storedTestCaseResolutions.getData().stream()
.anyMatch(t -> t.getTestCaseReference().getId().equals(testCaseEntity1.getId())));
deleteEntity(testCaseEntity1.getId(), true, true, ADMIN_AUTH_HEADERS);
storedTestCaseResolutions =
getTestCaseFailureStatus(startTs, endTs, null, TestCaseResolutionStatusTypes.Ack);
assertEquals(1, storedTestCaseResolutions.getData().size());
assertTrue(
storedTestCaseResolutions.getData().stream()
.noneMatch(t -> t.getTestCaseReference().getId().equals(testCaseEntity1.getId())));
}
@Test

View File

@ -485,6 +485,7 @@ const DataQualityTab: React.FC<DataQualityTabProps> = ({
/>
) : (
<DeleteWidgetModal
isRecursiveDelete
afterDeleteAction={afterDeleteAction}
allowSoftDelete={false}
entityId={selectedTestCase?.data?.id ?? ''}