Mirror of https://github.com/open-metadata/OpenMetadata.git (synced 2025-09-02 05:33:49 +00:00)
* fix: unique test computation to scalar_subquery
* fix: make test case reference an inherited field
* style: ran java linting
* fix: added test case resolution migration
* style: ran java linting
This commit is contained in:
parent e8d9490b0e
commit 4ed87a4d08
@@ -63,7 +63,7 @@ class ColumnValuesToBeUniqueValidator(
         )  # type: ignore

         try:
-            self.value = dict(self.runner.dispatch_query_select_first(count, unique_count.subquery("uniqueCount")))  # type: ignore
+            self.value = dict(self.runner.dispatch_query_select_first(count, unique_count.scalar_subquery().label("uniqueCount")))  # type: ignore
             res = self.value.get(Metrics.COUNT.name)
         except Exception as exc:
             raise SQLAlchemyError(exc)
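Background for the hunk above: in SQLAlchemy 1.4+, a SELECT that is meant to be consumed as a single value inside another SELECT should be coerced with scalar_subquery() (optionally labeled) rather than subquery(), which builds a FROM-able subquery and makes SQLAlchemy emit a coercion warning when it is selected as a column. A minimal standalone sketch of the pattern, using an illustrative table that is not part of the OpenMetadata profiler:

# Minimal sketch, not OpenMetadata code; table and column names are illustrative.
from sqlalchemy import Column, Integer, MetaData, String, Table, func, select

metadata = MetaData()
users = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(64)),
)

# Old pattern: unique_count.subquery("uniqueCount") produces a FROM-able subquery;
# selecting it as a column triggers SQLAlchemy's scalar-coercion warning.
# New pattern: coerce explicitly and label it, so both aggregates come back in one row.
row_count = select(func.count(users.c.id)).scalar_subquery().label("valuesCount")
unique_count = (
    select(func.count(func.distinct(users.c.name))).scalar_subquery().label("uniqueCount")
)

stmt = select(row_count, unique_count)
print(stmt)  # renders a single SELECT containing two labeled scalar subqueries

The validator change follows the same idea: the uniqueness metric is handed to the runner as a labeled scalar subquery instead of a named subquery.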
@@ -20,6 +20,7 @@ from .database_test_container import DataBaseTestContainer
 class MySQLTestContainer(DataBaseTestContainer):
     def __init__(self):
         self.mysql_container = MySqlContainer("mysql:latest")
+        self.mysql_container.with_env("TC_POOLING_INTERVAL", "3")
         self.start()
         self.connection_url = self.mysql_container.get_connection_url()
         super().__init__()
@@ -20,6 +20,7 @@ from .database_test_container import DataBaseTestContainer
 class PostgresTestContainer(DataBaseTestContainer):
     def __init__(self):
         self.postgres_container = PostgresContainer("postgres:latest")
+        self.postgres_container.with_env("TC_POOLING_INTERVAL", "3")
         self.start()
         self.connection_url = self.postgres_container.get_connection_url()
         super().__init__()
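Both container hunks make the same one-line change. A standalone sketch of the resulting setup with testcontainers-python is below; the DataBaseTestContainer base class from the diff is OpenMetadata-specific and is omitted, and the TC_POOLING_INTERVAL value simply mirrors what the diff sets:

# Standalone sketch (assumes: pip install "testcontainers[mysql]" sqlalchemy pymysql).
from sqlalchemy import create_engine, text
from testcontainers.mysql import MySqlContainer

mysql_container = MySqlContainer("mysql:latest")
# Same environment variable the diff adds to the container.
mysql_container.with_env("TC_POOLING_INTERVAL", "3")

with mysql_container:  # starts the container, stops it on exit
    engine = create_engine(mysql_container.get_connection_url())
    with engine.connect() as conn:
        print(conn.execute(text("SELECT 1")).scalar())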
@@ -1081,7 +1081,13 @@ public abstract class EntityRepository<T extends EntityInterface> {
       throw new IllegalArgumentException(CatalogExceptionMessage.entityIsNotEmpty(entityType));
     }
     // Delete all the contained entities
-    for (EntityRelationshipRecord entityRelationshipRecord : childrenRecords) {
+    deleteChildren(childrenRecords, hardDelete, updatedBy);
+  }
+
+  @Transaction
+  protected void deleteChildren(
+      List<EntityRelationshipRecord> children, boolean hardDelete, String updatedBy) {
+    for (EntityRelationshipRecord entityRelationshipRecord : children) {
       LOG.info(
           "Recursively {} deleting {} {}",
           hardDelete ? "hard" : "soft",
@@ -1581,7 +1587,7 @@ public abstract class EntityRepository<T extends EntityInterface> {
         : null;
   }

-  public final void ensureSingleRelationship(
+  public static void ensureSingleRelationship(
       String entityType,
       UUID id,
       List<EntityRelationshipRecord> relations,
@@ -235,6 +235,13 @@ public interface EntityTimeSeriesDAO {
     return getById(getTimeSeriesTableName(), id.toString());
   }

+  @SqlUpdate(value = "DELETE from <table> WHERE id = :id")
+  void deleteById(@Define("table") String table, @Bind("id") String id);
+
+  default void deleteById(UUID id) {
+    deleteById(getTimeSeriesTableName(), id.toString());
+  }
+
   /** @deprecated */
   @SqlQuery("SELECT COUNT(DISTINCT entityFQN) FROM <table>")
   @Deprecated(since = "1.1.1")
@@ -1,11 +1,15 @@
 package org.openmetadata.service.jdbi3;

+import static org.openmetadata.schema.type.Include.ALL;
+
 import java.util.ArrayList;
 import java.util.List;
 import java.util.UUID;
 import lombok.Getter;
 import org.jdbi.v3.sqlobject.transaction.Transaction;
 import org.openmetadata.schema.EntityTimeSeriesInterface;
+import org.openmetadata.schema.type.EntityReference;
+import org.openmetadata.schema.type.Relationship;
 import org.openmetadata.service.Entity;
 import org.openmetadata.service.search.SearchRepository;
 import org.openmetadata.service.util.JsonUtils;
@@ -38,21 +42,91 @@ public abstract class EntityTimeSeriesRepository<T extends EntityTimeSeriesInterface> {
   @Transaction
   public T createNewRecord(T recordEntity, String extension, String recordFQN) {
     recordEntity.setId(UUID.randomUUID());
-    timeSeriesDao.insert(recordFQN, extension, entityType, JsonUtils.pojoToJson(recordEntity));
+    storeInternal(recordEntity, recordFQN, extension);
+    storeRelationshipInternal(recordEntity);
+    postCreate(recordEntity);
+    return recordEntity;
+  }
+
+  public T createNewRecord(T recordEntity, String recordFQN) {
+    recordEntity.setId(UUID.randomUUID());
+    storeInternal(recordEntity, recordFQN);
+    storeRelationshipInternal(recordEntity);
     postCreate(recordEntity);
     return recordEntity;
   }

   @Transaction
-  public T createNewRecord(T recordEntity, String recordFQN) {
-    recordEntity.setId(UUID.randomUUID());
+  protected void storeInternal(T recordEntity, String recordFQN) {
     timeSeriesDao.insert(recordFQN, entityType, JsonUtils.pojoToJson(recordEntity));
-    postCreate(recordEntity);
-    return recordEntity;
   }

-  protected void postCreate(T entity) {
-    searchRepository.createTimeSeriesEntity(JsonUtils.deepCopy(entity, entityClass));
+  @Transaction
+  protected void storeInternal(T recordEntity, String recordFQN, String extension) {
+    timeSeriesDao.insert(recordFQN, extension, entityType, JsonUtils.pojoToJson(recordEntity));
+  }
+
+  protected void storeRelationshipInternal(T recordEntity) {
+    storeRelationship(recordEntity);
+  }
+
+  protected void storeRelationship(T recordEntity) {
+    // Nothing to do in the default implementation
+  }
+
+  protected void setInheritedFields(T recordEntity) {
+    // Nothing to do in the default implementation
+  }
+
+  @Transaction
+  public final void addRelationship(
+      UUID fromId,
+      UUID toId,
+      String fromEntity,
+      String toEntity,
+      Relationship relationship,
+      String json,
+      boolean bidirectional) {
+    UUID from = fromId;
+    UUID to = toId;
+    if (bidirectional && fromId.compareTo(toId) > 0) {
+      // For bidirectional relationship, instead of adding two row fromId -> toId and toId ->
+      // fromId, just add one row where fromId is alphabetically less than toId
+      from = toId;
+      to = fromId;
+    }
+    daoCollection
+        .relationshipDAO()
+        .insert(from, to, fromEntity, toEntity, relationship.ordinal(), json);
+  }
+
+  protected void postCreate(T recordEntity) {
+    searchRepository.createTimeSeriesEntity(JsonUtils.deepCopy(recordEntity, entityClass));
+  }
+
+  protected void postDelete(T recordEntity) {
+    searchRepository.deleteTimeSeriesEntityById(JsonUtils.deepCopy(recordEntity, entityClass));
+  }
+
+  public final List<CollectionDAO.EntityRelationshipRecord> findFromRecords(
+      UUID toId, String toEntityType, Relationship relationship, String fromEntityType) {
+    // When fromEntityType is null, all the relationships from any entity is returned
+    return fromEntityType == null
+        ? daoCollection.relationshipDAO().findFrom(toId, toEntityType, relationship.ordinal())
+        : daoCollection
+            .relationshipDAO()
+            .findFrom(toId, toEntityType, relationship.ordinal(), fromEntityType);
+  }
+
+  protected EntityReference getFromEntityRef(
+      UUID toId, Relationship relationship, String fromEntityType, boolean mustHaveRelationship) {
+    List<CollectionDAO.EntityRelationshipRecord> records =
+        findFromRecords(toId, entityType, relationship, fromEntityType);
+    EntityRepository.ensureSingleRelationship(
+        entityType, toId, records, relationship.value(), fromEntityType, mustHaveRelationship);
+    return !records.isEmpty()
+        ? Entity.getEntityReferenceById(records.get(0).getType(), records.get(0).getId(), ALL)
+        : null;
   }

   public final ResultList<T> getResultList(
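The hunk above splits createNewRecord into overridable steps: storeInternal persists the JSON row, storeRelationshipInternal/storeRelationship lets a subclass attach the record to a parent entity, setInheritedFields lets it resolve that parent back when records are read, and postCreate/postDelete keep the search index in sync. A compact Python stand-in for the call order only (method names mirror the Java; everything else is illustrative):

# Python stand-in for the hook order; this is not OpenMetadata code.
import uuid


class EntityTimeSeriesRepository:
    def create_new_record(self, record: dict, record_fqn: str) -> dict:
        record["id"] = str(uuid.uuid4())
        self.store_internal(record, record_fqn)      # persist the JSON row
        self.store_relationship_internal(record)     # optionally link to a parent entity
        self.post_create(record)                     # e.g. push to the search index
        return record

    def store_internal(self, record: dict, record_fqn: str) -> None:
        print(f"insert {record_fqn} -> {record}")

    def store_relationship_internal(self, record: dict) -> None:
        self.store_relationship(record)

    def store_relationship(self, record: dict) -> None:
        pass  # nothing to do in the default implementation

    def set_inherited_fields(self, record: dict) -> None:
        pass  # subclasses re-attach inherited references when records are read back

    def post_create(self, record: dict) -> None:
        pass


class TestCaseResolutionStatusRepository(EntityTimeSeriesRepository):
    def store_relationship(self, record: dict) -> None:
        print("link testCase --PARENT_OF--> testCaseResolutionStatus")


TestCaseResolutionStatusRepository().create_new_record({"status": "New"}, "db.table.testCase")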
@@ -90,8 +164,9 @@ public abstract class EntityTimeSeriesRepository<T extends EntityTimeSeriesInterface> {
           timeSeriesDao.listWithOffset(filter, limitParam, offsetInt, startTs, endTs, latest);

       for (String json : jsons) {
-        T entity = JsonUtils.readValue(json, entityClass);
-        entityList.add(entity);
+        T recordEntity = JsonUtils.readValue(json, entityClass);
+        setInheritedFields(recordEntity);
+        entityList.add(recordEntity);
       }
       return getResultList(entityList, beforeOffset, afterOffset, total);
     } else {
@@ -109,7 +184,9 @@ public abstract class EntityTimeSeriesRepository<T extends EntityTimeSeriesInterface> {
     if (jsonRecord == null) {
       return null;
     }
-    return JsonUtils.readValue(jsonRecord, entityClass);
+    T entityRecord = JsonUtils.readValue(jsonRecord, entityClass);
+    setInheritedFields(entityRecord);
+    return entityRecord;
   }

   public T getById(UUID id) {
@@ -117,6 +194,23 @@ public abstract class EntityTimeSeriesRepository<T extends EntityTimeSeriesInterface> {
     if (jsonRecord == null) {
       return null;
     }
-    return JsonUtils.readValue(jsonRecord, entityClass);
+    T entityRecord = JsonUtils.readValue(jsonRecord, entityClass);
+    setInheritedFields(entityRecord);
+    return entityRecord;
+  }
+
+  public void deleteById(UUID id, boolean hardDelete) {
+    if (!hardDelete) {
+      // time series entities by definition cannot be soft deleted (i.e. they do not have a state
+      // and they should be immutable) thought they can be contained inside entities that can be
+      // soft deleted
+      return;
+    }
+    T entityRecord = getById(id);
+    if (entityRecord == null) {
+      return;
+    }
+    timeSeriesDao.deleteById(id);
+    postDelete(entityRecord);
   }
 }
@@ -414,6 +414,20 @@ public class FeedRepository {
     sortPosts(thread);
   }

+  @Transaction
+  public void closeTaskWithoutWorkflow(Thread thread, String user, CloseTask closeTask) {
+    TaskDetails task = thread.getTask();
+    if (task.getStatus() != Open) {
+      return;
+    }
+    task.withStatus(TaskStatus.Closed).withClosedBy(user).withClosedAt(System.currentTimeMillis());
+    thread.withTask(task).withUpdatedBy(user).withUpdatedAt(System.currentTimeMillis());
+
+    dao.feedDAO().update(thread.getId(), JsonUtils.pojoToJson(thread));
+    addClosingPost(thread, user, closeTask.getComment());
+    sortPosts(thread);
+  }
+
   private void storeMentions(Thread thread, String message) {
     // Create relationship for users, teams, and other entities that are mentioned in the post
     // Multiple mentions of the same entity is handled by taking distinct mentions
@@ -275,9 +275,6 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
       }
     }

-    // If we delete the test case, we need to clean up the resolution ts
-    daoCollection.testCaseResolutionStatusTimeSeriesDao().delete(test.getFullyQualifiedName());
-
     deleteTestCaseFailedRowsSample(test.getId());
   }

@@ -372,13 +369,34 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
             .withUpdatedAt(System.currentTimeMillis())
             .withTestCaseReference(testCase.getEntityReference());

+    testCaseResolutionStatusRepository.createNewRecord(status, testCase.getFullyQualifiedName());
     TestCaseResolutionStatus incident =
-        testCaseResolutionStatusRepository.createNewRecord(
-            status, testCase.getFullyQualifiedName());
+        testCaseResolutionStatusRepository.getLatestRecord(testCase.getFullyQualifiedName());

     return incident.getStateId();
   }

+  @Transaction
+  @Override
+  protected void deleteChildren(
+      List<CollectionDAO.EntityRelationshipRecord> children, boolean hardDelete, String updatedBy) {
+    if (hardDelete) {
+      for (CollectionDAO.EntityRelationshipRecord entityRelationshipRecord : children) {
+        LOG.info(
+            "Recursively {} deleting {} {}",
+            hardDelete ? "hard" : "soft",
+            entityRelationshipRecord.getType(),
+            entityRelationshipRecord.getId());
+        TestCaseResolutionStatusRepository testCaseResolutionStatusRepository =
+            (TestCaseResolutionStatusRepository)
+                Entity.getEntityTimeSeriesRepository(Entity.TEST_CASE_RESOLUTION_STATUS);
+        for (CollectionDAO.EntityRelationshipRecord child : children) {
+          testCaseResolutionStatusRepository.deleteById(child.getId(), hardDelete);
+        }
+      }
+    }
+  }
+
   public RestUtil.PutResponse<TestCaseResult> deleteTestCaseResult(
       String updatedBy, String fqn, Long timestamp) {
     // Validate the request content
@@ -778,12 +796,16 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
             .withTestCaseReference(latestTestCaseResolutionStatus.getTestCaseReference())
             .withUpdatedBy(user.getEntityReference());

+    EntityReference testCaseReference = testCaseResolutionStatus.getTestCaseReference();
+    testCaseResolutionStatus.setTestCaseReference(null);
     Entity.getCollectionDAO()
         .testCaseResolutionStatusTimeSeriesDao()
         .insert(
-            testCaseResolutionStatus.getTestCaseReference().getFullyQualifiedName(),
+            testCaseReference.getFullyQualifiedName(),
             Entity.TEST_CASE_RESOLUTION_STATUS,
             JsonUtils.pojoToJson(testCaseResolutionStatus));
+    testCaseResolutionStatus.setTestCaseReference(testCaseReference);
+    testCaseResolutionStatusRepository.storeRelationship(testCaseResolutionStatus);
     testCaseResolutionStatusRepository.postCreate(testCaseResolutionStatus);

     // Return the TestCase with the StateId to avoid any unnecessary PATCH when resolving the task
@@ -833,12 +855,16 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
             .withTestCaseReference(latestTestCaseResolutionStatus.getTestCaseReference())
             .withUpdatedBy(user.getEntityReference());

+    EntityReference testCaseReference = testCaseResolutionStatus.getTestCaseReference();
+    testCaseResolutionStatus.setTestCaseReference(null);
     Entity.getCollectionDAO()
         .testCaseResolutionStatusTimeSeriesDao()
         .insert(
-            testCaseResolutionStatus.getTestCaseReference().getFullyQualifiedName(),
+            testCaseReference.getFullyQualifiedName(),
             Entity.TEST_CASE_RESOLUTION_STATUS,
             JsonUtils.pojoToJson(testCaseResolutionStatus));
+    testCaseResolutionStatus.setTestCaseReference(testCaseReference);
+    testCaseResolutionStatusRepository.storeRelationship(testCaseResolutionStatus);
     testCaseResolutionStatusRepository.postCreate(testCaseResolutionStatus);
   }
 }
@@ -7,6 +7,7 @@ import java.beans.IntrospectionException;
 import java.beans.Introspector;
 import java.beans.PropertyDescriptor;
 import java.lang.reflect.InvocationTargetException;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.UUID;
@@ -14,6 +15,7 @@ import javax.json.JsonPatch;
 import javax.ws.rs.core.Response;
 import org.jdbi.v3.sqlobject.transaction.Transaction;
 import org.openmetadata.schema.EntityInterface;
+import org.openmetadata.schema.api.feed.CloseTask;
 import org.openmetadata.schema.api.feed.ResolveTask;
 import org.openmetadata.schema.entity.feed.Thread;
 import org.openmetadata.schema.entity.teams.User;
@@ -25,6 +27,7 @@ import org.openmetadata.schema.tests.type.TestCaseResolutionStatus;
 import org.openmetadata.schema.tests.type.TestCaseResolutionStatusTypes;
 import org.openmetadata.schema.type.EntityReference;
 import org.openmetadata.schema.type.Include;
+import org.openmetadata.schema.type.Relationship;
 import org.openmetadata.schema.type.TaskDetails;
 import org.openmetadata.schema.type.TaskStatus;
 import org.openmetadata.schema.type.TaskType;
@@ -53,11 +56,17 @@ public class TestCaseResolutionStatusRepository

   public ResultList<TestCaseResolutionStatus> listTestCaseResolutionStatusesForStateId(
       UUID stateId) {
+    List<TestCaseResolutionStatus> testCaseResolutionStatuses = new ArrayList<>();
     List<String> jsons =
         ((CollectionDAO.TestCaseResolutionStatusTimeSeriesDAO) timeSeriesDao)
             .listTestCaseResolutionStatusesForStateId(stateId.toString());
-    List<TestCaseResolutionStatus> testCaseResolutionStatuses =
-        JsonUtils.readObjects(jsons, TestCaseResolutionStatus.class);
+    for (String json : jsons) {
+      TestCaseResolutionStatus testCaseResolutionStatus =
+          JsonUtils.readValue(json, TestCaseResolutionStatus.class);
+      setInheritedFields(testCaseResolutionStatus);
+      testCaseResolutionStatuses.add(testCaseResolutionStatus);
+    }
+
     return getResultList(testCaseResolutionStatuses, null, null, testCaseResolutionStatuses.size());
   }
@@ -145,11 +154,9 @@ public class TestCaseResolutionStatusRepository

   @Override
   @Transaction
-  public TestCaseResolutionStatus createNewRecord(
-      TestCaseResolutionStatus recordEntity, String recordFQN) {
+  public void storeInternal(TestCaseResolutionStatus recordEntity, String recordFQN) {

-    TestCaseResolutionStatus lastIncident =
-        getLatestRecord(recordEntity.getTestCaseReference().getFullyQualifiedName());
+    TestCaseResolutionStatus lastIncident = getLatestRecord(recordFQN);

     if (recordEntity.getStateId() == null) {
       recordEntity.setStateId(UUID.randomUUID());
@@ -177,7 +184,7 @@ public class TestCaseResolutionStatusRepository
       case New -> {
         // If there is already an existing New incident we'll return it
         if (Boolean.TRUE.equals(unresolvedIncident(lastIncident))) {
-          return lastIncident;
+          return;
         }
       }
       case Ack, Assigned -> openOrAssignTask(recordEntity);
@@ -187,12 +194,33 @@ public class TestCaseResolutionStatusRepository
         // We don't create a new record. The new status will be added via the
         // TestCaseFailureResolutionTaskWorkflow
        // implemented in the TestCaseRepository.
-        return getLatestRecord(recordEntity.getTestCaseReference().getFullyQualifiedName());
+        return;
       }
       default -> throw new IllegalArgumentException(
           String.format("Invalid status %s", recordEntity.getTestCaseResolutionStatusType()));
     }
-    return super.createNewRecord(recordEntity, recordFQN);
+    EntityReference testCaseReference = recordEntity.getTestCaseReference();
+    recordEntity.withTestCaseReference(null); // we don't want to store the reference in the record
+    super.storeInternal(recordEntity, recordFQN);
+    recordEntity.withTestCaseReference(testCaseReference);
+  }
+
+  @Override
+  protected void storeRelationship(TestCaseResolutionStatus recordEntity) {
+    addRelationship(
+        recordEntity.getTestCaseReference().getId(),
+        recordEntity.getId(),
+        Entity.TEST_CASE,
+        Entity.TEST_CASE_RESOLUTION_STATUS,
+        Relationship.PARENT_OF,
+        null,
+        false);
+  }
+
+  @Override
+  protected void setInheritedFields(TestCaseResolutionStatus recordEntity) {
+    recordEntity.setTestCaseReference(
+        getFromEntityRef(recordEntity.getId(), Relationship.PARENT_OF, Entity.TEST_CASE, true));
   }

   private void openOrAssignTask(TestCaseResolutionStatus incidentStatus) {
@@ -253,17 +281,20 @@ public class TestCaseResolutionStatusRepository
     Thread thread = getIncidentTask(lastIncidentStatus);

     if (thread != null) {
-      // If there is an existing task, we'll resolve it and create a new incident
-      // status with the Resolved status flow
+      // If there is an existing task, we'll close it without performing the workflow
+      // (i.e. creating a new incident which will be handled here).
+      FeedRepository.ThreadContext threadContext = new FeedRepository.ThreadContext(thread);
+      threadContext.getThread().getTask().withNewValue(resolveTask.getNewValue());
       Entity.getFeedRepository()
-          .resolveTask(
-              new FeedRepository.ThreadContext(thread),
-              updatedBy.getFullyQualifiedName(),
-              resolveTask);
-    } else {
-      // if there is no task, we'll simply create a new incident status (e.g. New -> Resolved)
-      super.createNewRecord(newIncidentStatus, testCase.getFullyQualifiedName());
+          .closeTaskWithoutWorkflow(
+              threadContext.getThread(), updatedBy.getFullyQualifiedName(), new CloseTask());
     }
+    // if there is no task, we'll simply create a new incident status (e.g. New -> Resolved)
+    EntityReference testCaseReference = newIncidentStatus.getTestCaseReference();
+    newIncidentStatus.setTestCaseReference(
+        null); // we don't want to store the reference in the record
+    super.storeInternal(newIncidentStatus, testCase.getFullyQualifiedName());
+    newIncidentStatus.setTestCaseReference(testCaseReference);
   }

   private void createTask(
@@ -2,6 +2,7 @@ package org.openmetadata.service.migration.mysql.v140;

 import static org.openmetadata.service.migration.utils.v140.MigrationUtil.migrateGenericToWebhook;
 import static org.openmetadata.service.migration.utils.v140.MigrationUtil.migrateTablePartition;
+import static org.openmetadata.service.migration.utils.v140.MigrationUtil.migrateTestCaseResolution;

 import lombok.SneakyThrows;
 import org.jdbi.v3.core.Handle;
@@ -27,9 +28,13 @@ public class Migration extends MigrationProcessImpl {
   @Override
   @SneakyThrows
   public void runDataMigration() {
+    // Migrate Table Partition
     migrateTablePartition(handle, collectionDAO);

     // Migrate Generic to Webhook
     migrateGenericToWebhook(collectionDAO);
+
+    // Migrate Test case resolution status
+    migrateTestCaseResolution(handle, collectionDAO);
   }
 }
@@ -2,6 +2,7 @@ package org.openmetadata.service.migration.postgres.v140;

 import static org.openmetadata.service.migration.utils.v140.MigrationUtil.migrateGenericToWebhook;
 import static org.openmetadata.service.migration.utils.v140.MigrationUtil.migrateTablePartition;
+import static org.openmetadata.service.migration.utils.v140.MigrationUtil.migrateTestCaseResolution;

 import lombok.SneakyThrows;
 import org.jdbi.v3.core.Handle;
@@ -27,9 +28,13 @@ public class Migration extends MigrationProcessImpl {
   @Override
   @SneakyThrows
   public void runDataMigration() {
+    // Migrate Table Partition
     migrateTablePartition(handle, collectionDAO);

     // Migrate Generic to Webhook
     migrateGenericToWebhook(collectionDAO);
+
+    // Migrate Test case resolution status
+    migrateTestCaseResolution(handle, collectionDAO);
   }
 }
@@ -3,6 +3,7 @@ package org.openmetadata.service.migration.utils.v140;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.UUID;
 import javax.json.Json;
 import javax.json.JsonArray;
 import javax.json.JsonArrayBuilder;
@@ -16,9 +17,12 @@ import org.json.JSONObject;
 import org.openmetadata.schema.api.services.CreateDatabaseService;
 import org.openmetadata.schema.entity.data.Table;
 import org.openmetadata.schema.entity.events.EventSubscription;
+import org.openmetadata.schema.tests.type.TestCaseResolutionStatus;
 import org.openmetadata.schema.type.PartitionColumnDetails;
 import org.openmetadata.schema.type.PartitionIntervalTypes;
+import org.openmetadata.schema.type.Relationship;
 import org.openmetadata.schema.type.TablePartition;
+import org.openmetadata.service.Entity;
 import org.openmetadata.service.jdbi3.CollectionDAO;
 import org.openmetadata.service.resources.databases.DatasourceConfig;
 import org.openmetadata.service.util.JsonUtils;
@@ -34,6 +38,13 @@ public class MigrationUtil {
   private static final String POSTGRES_QUERY_TABLES_WITH_PARTITION =
       "SELECT json " + "FROM table_entity " + "WHERE json->'tablePartition' IS NOT NULL";

+  private static final String TEST_CASE_RESOLUTION_QUERY =
+      "SELECT json FROM test_case_resolution_status_time_series";
+  private static final String MYSQL_TEST_CASE_RESOLUTION_UPDATE_QUERY =
+      "UPDATE test_case_resolution_status_time_series SET json = :json WHERE id = :id";
+  private static final String POSTGRES_TEST_CASE_RESOLUTION_UPDATE_QUERY =
+      "UPDATE test_case_resolution_status_time_series SET json = :json::jsonb WHERE id = :id";
+
   private MigrationUtil() {
     /* Cannot create object util class*/
   }
@@ -64,6 +75,50 @@ public class MigrationUtil {
     }
   }

+  public static void migrateTestCaseResolution(Handle handle, CollectionDAO collectionDAO) {
+    try {
+      handle
+          .createQuery(TEST_CASE_RESOLUTION_QUERY)
+          .mapToMap()
+          .forEach(
+              row -> {
+                try {
+                  TestCaseResolutionStatus testCaseResolutionStatus =
+                      JsonUtils.readValue(
+                          row.get("json").toString(), TestCaseResolutionStatus.class);
+                  UUID fromId = testCaseResolutionStatus.getTestCaseReference().getId();
+                  UUID toId = testCaseResolutionStatus.getId();
+                  // Store the test case <-> incident relationship
+                  collectionDAO
+                      .relationshipDAO()
+                      .insert(
+                          fromId,
+                          toId,
+                          Entity.TEST_CASE,
+                          Entity.TEST_CASE_RESOLUTION_STATUS,
+                          Relationship.PARENT_OF.ordinal(),
+                          null);
+                  // Remove the test case reference from the test case resolution status
+                  testCaseResolutionStatus.setTestCaseReference(null);
+                  String json = JsonUtils.pojoToJson(testCaseResolutionStatus);
+                  String updateQuery = MYSQL_TEST_CASE_RESOLUTION_UPDATE_QUERY;
+                  if (Boolean.FALSE.equals(DatasourceConfig.getInstance().isMySQL())) {
+                    updateQuery = POSTGRES_TEST_CASE_RESOLUTION_UPDATE_QUERY;
+                  }
+                  handle
+                      .createUpdate(updateQuery)
+                      .bind("json", json)
+                      .bind("id", toId.toString())
+                      .execute();
+                } catch (Exception ex) {
+                  LOG.warn("Error during the test case resolution migration due to ", ex);
+                }
+              });
+    } catch (Exception ex) {
+      LOG.warn("Error running the test case resolution migration ", ex);
+    }
+  }
+
   public static void migrateTablePartition(Handle handle, CollectionDAO collectionDAO) {
     try {
       if (Boolean.TRUE.equals(DatasourceConfig.getInstance().isMySQL())) {
@@ -737,6 +737,11 @@ public class TestCaseResource extends EntityResource<TestCase, TestCaseRepository> {
           @QueryParam("hardDelete")
           @DefaultValue("false")
           boolean hardDelete,
+      @Parameter(
+              description = "Recursively delete this entity and it's children. (Default `false`)")
+          @DefaultValue("false")
+          @QueryParam("recursive")
+          boolean recursive,
       @Parameter(description = "Id of the test case", schema = @Schema(type = "UUID"))
           @PathParam("id")
           UUID id) {
@@ -746,7 +751,7 @@ public class TestCaseResource extends EntityResource<TestCase, TestCaseRepository> {
     OperationContext operationContext =
         new OperationContext(Entity.TABLE, MetadataOperation.EDIT_TESTS);
     authorizer.authorize(securityContext, operationContext, resourceContext);
-    return delete(uriInfo, securityContext, id, false, hardDelete);
+    return delete(uriInfo, securityContext, id, recursive, hardDelete);
   }

   @DELETE
@@ -766,12 +771,17 @@ public class TestCaseResource extends EntityResource<TestCase, TestCaseRepository> {
           @QueryParam("hardDelete")
           @DefaultValue("false")
           boolean hardDelete,
+      @Parameter(
+              description = "Recursively delete this entity and it's children. (Default `false`)")
+          @DefaultValue("false")
+          @QueryParam("recursive")
+          boolean recursive,
       @Parameter(
               description = "Fully qualified name of the test case",
               schema = @Schema(type = "string"))
           @PathParam("fqn")
           String fqn) {
-    return deleteByName(uriInfo, securityContext, fqn, false, hardDelete);
+    return deleteByName(uriInfo, securityContext, fqn, recursive, hardDelete);
   }

   @DELETE
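With the new recursive query parameter wired into delete/deleteByName above, a client can hard-delete a test case together with its resolution history in one call. A hypothetical example follows; the base URL, token, and /v1/dataQuality/testCases path are assumptions for illustration, not taken from this diff:

# Hypothetical client call; adjust base URL, auth, and resource path to your deployment.
import requests

BASE_URL = "http://localhost:8585/api"             # assumed OpenMetadata server
HEADERS = {"Authorization": "Bearer <jwt-token>"}  # placeholder token

test_case_id = "00000000-0000-0000-0000-000000000000"  # placeholder UUID
response = requests.delete(
    f"{BASE_URL}/v1/dataQuality/testCases/{test_case_id}",
    params={"recursive": "true", "hardDelete": "true"},  # flags exposed by the hunks above
    headers=HEADERS,
)
response.raise_for_status()
print(response.status_code)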
@@ -502,6 +502,26 @@ public class SearchRepository {
     }
   }

+  public void deleteTimeSeriesEntityById(EntityTimeSeriesInterface entity) {
+    if (entity != null) {
+      String entityId = entity.getId().toString();
+      String entityType = entity.getEntityReference().getType();
+      IndexMapping indexMapping = entityIndexMap.get(entityType);
+      try {
+        searchClient.deleteEntity(indexMapping.getIndexName(clusterAlias), entityId);
+      } catch (Exception ie) {
+        LOG.error(
+            String.format(
+                "Issue in Deleting the search document for entityID [%s] and entityType [%s]. Reason[%s], Cause[%s], Stack [%s]",
+                entityId,
+                entityType,
+                ie.getMessage(),
+                ie.getCause(),
+                ExceptionUtils.getStackTrace(ie)));
+      }
+    }
+  }
+
   public void softDeleteOrRestoreEntity(EntityInterface entity, boolean delete) {
     if (entity != null) {
       String entityId = entity.getId().toString();
@@ -191,8 +191,59 @@
       }
     },
     "updatedBy": {
+      "properties": {
+        "id": {
+          "type": "keyword",
+          "fields": {
+            "keyword": {
+              "type": "keyword",
+              "ignore_above": 36
+            }
+          }
+        },
+        "type": {
+          "type": "keyword"
+        },
+        "name": {
+          "type": "keyword",
+          "normalizer": "lowercase_normalizer",
+          "fields": {
+            "keyword": {
+              "type": "keyword",
+              "ignore_above": 256
+            }
+          }
+        },
+        "displayName": {
+          "type": "keyword",
+          "fields": {
+            "keyword": {
+              "type": "keyword",
+              "ignore_above": 256
+            }
+          }
+        },
+        "fullyQualifiedName": {
           "type": "text"
         },
+        "description": {
+          "type": "text",
+          "analyzer": "om_analyzer",
+          "fields": {
+            "keyword": {
+              "type": "keyword",
+              "normalizer": "lowercase_normalizer"
+            }
+          }
+        },
+        "deleted": {
+          "type": "text"
+        },
+        "href": {
+          "type": "text"
+        }
+      }
+    },
     "updatedAt": {
       "type": "date"
     },
@@ -205,8 +205,59 @@
       }
     },
     "updatedBy": {
+      "properties": {
+        "id": {
+          "type": "keyword",
+          "fields": {
+            "keyword": {
+              "type": "keyword",
+              "ignore_above": 36
+            }
+          }
+        },
+        "type": {
+          "type": "keyword"
+        },
+        "name": {
+          "type": "keyword",
+          "normalizer": "lowercase_normalizer",
+          "fields": {
+            "keyword": {
+              "type": "keyword",
+              "ignore_above": 256
+            }
+          }
+        },
+        "displayName": {
+          "type": "keyword",
+          "fields": {
+            "keyword": {
+              "type": "keyword",
+              "ignore_above": 256
+            }
+          }
+        },
+        "fullyQualifiedName": {
           "type": "text"
         },
+        "description": {
+          "type": "text",
+          "analyzer": "om_analyzer",
+          "fields": {
+            "keyword": {
+              "type": "keyword",
+              "normalizer": "lowercase_normalizer"
+            }
+          }
+        },
+        "deleted": {
+          "type": "text"
+        },
+        "href": {
+          "type": "text"
+        }
+      }
+    },
     "updatedAt": {
       "type": "date"
     },
@@ -195,8 +195,59 @@
       }
     },
     "updatedBy": {
+      "properties": {
+        "id": {
+          "type": "keyword",
+          "fields": {
+            "keyword": {
+              "type": "keyword",
+              "ignore_above": 36
+            }
+          }
+        },
+        "type": {
+          "type": "keyword"
+        },
+        "name": {
+          "type": "keyword",
+          "normalizer": "lowercase_normalizer",
+          "fields": {
+            "keyword": {
+              "type": "keyword",
+              "ignore_above": 256
+            }
+          }
+        },
+        "displayName": {
+          "type": "keyword",
+          "fields": {
+            "keyword": {
+              "type": "keyword",
+              "ignore_above": 256
+            }
+          }
+        },
+        "fullyQualifiedName": {
           "type": "text"
         },
+        "description": {
+          "type": "text",
+          "analyzer": "om_analyzer",
+          "fields": {
+            "keyword": {
+              "type": "keyword",
+              "normalizer": "lowercase_normalizer"
+            }
+          }
+        },
+        "deleted": {
+          "type": "text"
+        },
+        "href": {
+          "type": "text"
+        }
+      }
+    },
     "updatedAt": {
       "type": "date"
     },
@@ -1333,6 +1333,25 @@ public class TestCaseResourceTest extends EntityResourceTest<TestCase, CreateTestCase> {
     assertEquals(
         TestCaseResolutionStatusTypes.Ack,
         storedTestCaseResolutions.getData().get(0).getTestCaseResolutionStatusType());
+
+    // Delete test case recursively and check that the test case resolution status is also deleted
+    // 1. soft delete - should not delete the test case resolution status
+    // 2. hard delete - should delete the test case resolution status
+    deleteEntity(testCaseEntity1.getId(), true, false, ADMIN_AUTH_HEADERS);
+    storedTestCaseResolutions =
+        getTestCaseFailureStatus(startTs, endTs, null, TestCaseResolutionStatusTypes.Ack);
+    assertEquals(2, storedTestCaseResolutions.getData().size());
+    assertTrue(
+        storedTestCaseResolutions.getData().stream()
+            .anyMatch(t -> t.getTestCaseReference().getId().equals(testCaseEntity1.getId())));
+
+    deleteEntity(testCaseEntity1.getId(), true, true, ADMIN_AUTH_HEADERS);
+    storedTestCaseResolutions =
+        getTestCaseFailureStatus(startTs, endTs, null, TestCaseResolutionStatusTypes.Ack);
+    assertEquals(1, storedTestCaseResolutions.getData().size());
+    assertTrue(
+        storedTestCaseResolutions.getData().stream()
+            .noneMatch(t -> t.getTestCaseReference().getId().equals(testCaseEntity1.getId())));
   }

   @Test
@@ -485,6 +485,7 @@ const DataQualityTab: React.FC<DataQualityTabProps> = ({
           />
         ) : (
           <DeleteWidgetModal
+            isRecursiveDelete
            afterDeleteAction={afterDeleteAction}
            allowSoftDelete={false}
            entityId={selectedTestCase?.data?.id ?? ''}