Mirror of https://github.com/open-metadata/OpenMetadata.git (synced 2026-01-06 04:26:57 +00:00)
Fix: #19907 Glossary Term Approval Workflow - Trigger Filters - JsonLogic along with Exclude Fields filter (#22437)
* Draft implementation of auto-approving Glossary Terms - JsonLogicFilter
* Update generated TypeScript types
* Fixed issues: PassFilter inversion when jsonLogic is true, unescaping JsonFilter
* Corrected the test expectations and fixed the test
* Fixed the test for auto approval
* Fixed Java Checkstyle
* Added the entity to the jsonLogic map for rule application so that other operations on the entity can also be used in jsonLogic
* Implemented code review changes: 1. Use the singleton RuleEngine and add operations 2. Use migrations and remove the backward compatibility
* Update generated TypeScript types
* Removed Flowable variable updatedBy
* Update CheckEntityAttributesImpl to use the singleton rule instance
* Final design: improved JsonLogic triggers: isChanged, isReviewer, isOwner
* Fixed issue: extracting var from the isChanged rule
* Fixed issue: comment and trigger properties for entityBasedTrigger
* Hybrid approach: Exclude Fields + JsonLogic
* Update generated TypeScript types
* Added a lot of test cases!
* Auto-approve Glossary Terms during creation to avoid a forever-DRAFT state
* Added nodes for auto approval by reviewers
* Revert starting terms in DRAFT
* Added migrations to take the fresh data from the seed config, since our default workflow definitions have changed
* Handled migrations properly for glossary term auto-approve nodes
* Fix merge conflicts - updated MigrationUtil, fixed GlossaryStates test with newly added nodes
* Add trigger filter defaulting to an empty string
* Fix Java Checkstyle
* Updated the JsonLogic apply to incorporate RuleEngine changes
* Update the default rule for the CheckGlossaryTermHasReviewers node in the governance workflow
* Add migration logic
* Fix flaky OptimisticLockException in the test case
* Add tests
* Fix: 1. Propagate updatedBy from the trigger and make updatedBy a reviewer in case of auto approval by a reviewer. 2. Resolve conflicts between multiple workflow instances
* Remove early no-conflict return
* Revert search provider changes

---------

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
Co-authored-by: Karan Hotchandani <33024356+karanh37@users.noreply.github.com>
Co-authored-by: karanh37 <karanh37@gmail.com>
Co-authored-by: sonika-shah <58761340+sonika-shah@users.noreply.github.com>
Parent: 78839892b6 · Commit: 0aa9238a72
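For orientation, the hybrid trigger filtering added by this change works in two stages: the changed fields are first checked against the trigger's exclude list, and only then is the optional JsonLogic filter evaluated against the entity. A minimal sketch of that decision, assuming the jsonlogic-java API used elsewhere in this diff (the method name shouldTriggerWorkflow and the field names are illustrative, not part of the actual implementation):

import io.github.jamsesso.jsonlogic.JsonLogic;
import io.github.jamsesso.jsonlogic.JsonLogicException;
import java.util.List;
import java.util.Map;

class TriggerFilterSketch {
  // changedFields: names from the entity's ChangeDescription; excluded: trigger config "exclude";
  // filterLogic: trigger config "filter" (a JsonLogic rule); entityAsMap: the entity as a Map.
  static boolean shouldTriggerWorkflow(
      List<String> changedFields,
      List<String> excluded,
      String filterLogic,
      Map<String, Object> entityAsMap)
      throws JsonLogicException {
    // Stage 1: if every changed field is excluded (e.g. only "reviewers" changed), do not trigger.
    boolean relevantChange =
        changedFields.isEmpty() || changedFields.stream().anyMatch(f -> !excluded.contains(f));
    if (!relevantChange) {
      return false;
    }
    // Stage 2: an empty filter means "always trigger"; otherwise a rule such as
    // {"and":[{"isReviewer":{"var":"updatedBy"}}]} evaluating to true means the change can be
    // auto-handled, so the workflow is skipped.
    if (filterLogic == null || filterLogic.isBlank()) {
      return true;
    }
    JsonLogic jsonLogic = new JsonLogic(); // production code also registers custom ops like isReviewer/isOwner
    Object result = jsonLogic.apply(filterLogic, entityAsMap);
    return !Boolean.TRUE.equals(result);
  }
}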
@@ -3,6 +3,7 @@ package org.openmetadata.service.governance.workflows;
import static org.openmetadata.schema.entity.events.SubscriptionDestination.SubscriptionType.GOVERNANCE_WORKFLOW_CHANGE_EVENT;
import static org.openmetadata.service.governance.workflows.Workflow.GLOBAL_NAMESPACE;
import static org.openmetadata.service.governance.workflows.Workflow.RELATED_ENTITY_VARIABLE;
import static org.openmetadata.service.governance.workflows.Workflow.UPDATED_BY_VARIABLE;
import static org.openmetadata.service.governance.workflows.WorkflowVariableHandler.getNamespacedVariableName;

import java.util.HashMap;

@@ -68,6 +69,13 @@ public class WorkflowEventConsumer implements Destination<ChangeEvent> {
getNamespacedVariableName(GLOBAL_NAMESPACE, RELATED_ENTITY_VARIABLE),
entityLink.getLinkString());

// Set the updatedBy variable from the change event userName
if (event.getUserName() != null) {
variables.put(
getNamespacedVariableName(GLOBAL_NAMESPACE, UPDATED_BY_VARIABLE),
event.getUserName());
}

WorkflowHandler.getInstance().triggerWithSignal(signal, variables);
}
} catch (Exception exc) {

@@ -436,4 +436,8 @@ public class WorkflowHandler {
runtimeService.deleteProcessInstance(
instance.getId(), "Terminating all instances due to user request."));
}

public RuntimeService getRuntimeService() {
return processEngine.getRuntimeService();
}
}
@@ -0,0 +1,132 @@
package org.openmetadata.service.governance.workflows;

import static org.openmetadata.service.governance.workflows.elements.TriggerFactory.getMainWorkflowDefinitionNameFromTrigger;

import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.flowable.engine.RuntimeService;
import org.flowable.engine.runtime.ProcessInstance;
import org.jdbi.v3.core.transaction.TransactionIsolationLevel;
import org.openmetadata.schema.governance.workflows.WorkflowInstance;
import org.openmetadata.service.Entity;
import org.openmetadata.service.jdbi3.ListFilter;
import org.openmetadata.service.jdbi3.WorkflowDefinitionRepository;
import org.openmetadata.service.jdbi3.WorkflowInstanceRepository;
import org.openmetadata.service.jdbi3.WorkflowInstanceStateRepository;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.ResultList;

@Slf4j
public class WorkflowUtils {

/**
* Terminates conflicting workflow instances of a specific type for an entity.
* Orchestrates both Flowable termination and OpenMetadata database updates.
*/
public static void terminateConflictingInstances(
String triggerWorkflowDefinitionKey, String entityLink, String currentProcessInstanceId) {
try {
// Convert trigger name to main workflow name (e.g., "GlossaryTermApprovalWorkflowTrigger" →
// "GlossaryTermApprovalWorkflow")
String mainWorkflowDefinitionName =
getMainWorkflowDefinitionNameFromTrigger(triggerWorkflowDefinitionKey);
WorkflowInstanceRepository workflowInstanceRepository =
(WorkflowInstanceRepository)
Entity.getEntityTimeSeriesRepository(Entity.WORKFLOW_INSTANCE);
WorkflowInstanceStateRepository workflowInstanceStateRepository =
(WorkflowInstanceStateRepository)
Entity.getEntityTimeSeriesRepository(Entity.WORKFLOW_INSTANCE_STATE);

// Query for running instances of this workflow type for this entity
ListFilter filter = new ListFilter(null);
filter.addQueryParam("entityLink", entityLink);

long endTs = System.currentTimeMillis();
long startTs = endTs - (7L * 24 * 60 * 60 * 1000);

ResultList<WorkflowInstance> allInstances =
workflowInstanceRepository.list(null, startTs, endTs, 100, filter, false);

// Filter to running instances of this specific workflow type
List<WorkflowInstance> candidateInstances =
allInstances.getData().stream()
.filter(
instance -> WorkflowInstance.WorkflowStatus.RUNNING.equals(instance.getStatus()))
.filter(
instance -> {
try {
WorkflowDefinitionRepository repo =
(WorkflowDefinitionRepository)
Entity.getEntityRepository(Entity.WORKFLOW_DEFINITION);
var def =
repo.get(
null,
instance.getWorkflowDefinitionId(),
EntityUtil.Fields.EMPTY_FIELDS);
return mainWorkflowDefinitionName.equals(def.getName());
} catch (Exception e) {
return false;
}
})
.toList();

// Check if there are multiple instances using Flowable's API
RuntimeService runtimeService = WorkflowHandler.getInstance().getRuntimeService();
List<ProcessInstance> runningProcessInstances =
runtimeService
.createProcessInstanceQuery()
.processDefinitionKey(mainWorkflowDefinitionName)
.list();
// Find instances to terminate (exclude the current one by process instance ID)
List<WorkflowInstance> conflictingInstances =
candidateInstances.stream()
.filter(
instance -> {
// Check if this WorkflowInstance corresponds to a different process instance
return runningProcessInstances.stream()
.filter(pi -> !pi.getId().equals(currentProcessInstanceId))
.anyMatch(pi -> pi.getBusinessKey().equals(instance.getId().toString()));
})
.toList();

if (conflictingInstances.isEmpty()) {
LOG.debug("No conflicting instances found to terminate for {}", mainWorkflowDefinitionName);
return;
}
// Execute everything in a single transaction for atomicity
Entity.getJdbi()
.inTransaction(
TransactionIsolationLevel.READ_COMMITTED,
handle -> {
try {
// 1. Terminate Flowable processes using existing handler method
WorkflowHandler.getInstance().terminateWorkflow(mainWorkflowDefinitionName);

// 2. Mark OpenMetadata instances and states as FAILURE
for (WorkflowInstance instance : conflictingInstances) {
workflowInstanceStateRepository.markInstanceStatesAsFailed(
instance.getId(), "Terminated due to conflicting workflow instance");
workflowInstanceRepository.markInstanceAsFailed(
instance.getId(), "Terminated due to conflicting workflow instance");
}

return null;
} catch (Exception e) {
LOG.error(
"Failed to terminate conflicting instances in transaction: {}",
e.getMessage());
throw e;
}
});

LOG.info(
"Terminated {} conflicting instances of {} for entity {}",
conflictingInstances.size(),
mainWorkflowDefinitionName,
entityLink);

} catch (Exception e) {
LOG.warn("Failed to terminate conflicting instances: {}", e.getMessage());
}
}
}
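Usage sketch: this is how FilterEntityImpl (further down in this diff) invokes the helper from inside a Flowable JavaDelegate; the literal values in the comments are illustrative only.

// After deciding that the trigger passes its filters for the current entity:
String triggerKey =
    WorkflowHandler.getProcessDefinitionKeyFromId(execution.getProcessDefinitionId());
WorkflowUtils.terminateConflictingInstances(
    triggerKey,                        // e.g. "GlossaryTermApprovalWorkflowTrigger"
    entityLinkStr,                     // entity link of the glossary term being processed
    execution.getProcessInstanceId()); // current instance is kept; other RUNNING ones are marked FAILURE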
@@ -30,4 +30,9 @@ public class TriggerFactory {
public static String getTriggerWorkflowId(String workflowFQN) {
return String.format("%sTrigger", workflowFQN);
}

public static String getMainWorkflowDefinitionNameFromTrigger(
String triggerWorkflowDefinitionName) {
return triggerWorkflowDefinitionName.replaceFirst("Trigger$", "");
}
}
@@ -6,8 +6,6 @@ import static org.openmetadata.service.governance.workflows.Workflow.RESULT_VARI
import static org.openmetadata.service.governance.workflows.Workflow.WORKFLOW_RUNTIME_EXCEPTION;
import static org.openmetadata.service.governance.workflows.WorkflowHandler.getProcessDefinitionKeyFromId;

import io.github.jamsesso.jsonlogic.JsonLogic;
import io.github.jamsesso.jsonlogic.JsonLogicException;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.exception.ExceptionUtils;
@@ -21,6 +19,7 @@ import org.openmetadata.schema.utils.JsonUtils;
import org.openmetadata.service.Entity;
import org.openmetadata.service.governance.workflows.WorkflowVariableHandler;
import org.openmetadata.service.resources.feeds.MessageParser;
import org.openmetadata.service.rules.RuleEngine;

@Slf4j
public class CheckEntityAttributesImpl implements JavaDelegate {
@@ -53,15 +52,12 @@ public class CheckEntityAttributesImpl implements JavaDelegate {
private Boolean checkAttributes(MessageParser.EntityLink entityLink, String rules) {
EntityInterface entity = Entity.getEntity(entityLink, "*", Include.ALL);

JsonLogic jsonLogic = new JsonLogic();
boolean result = false;

boolean result;
try {
result = (boolean) jsonLogic.apply(rules, JsonUtils.getMap(entity));
} catch (JsonLogicException e) {
result = (boolean) RuleEngine.getInstance().apply(rules, JsonUtils.getMap(entity));
} catch (Exception e) {
throw new RuntimeException(e);
}

return result;
}
}
@@ -10,7 +10,9 @@ import static org.openmetadata.service.governance.workflows.WorkflowVariableHand
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.flowable.bpmn.model.BoundaryEvent;
import org.flowable.bpmn.model.BpmnModel;
import org.flowable.bpmn.model.CallActivity;
@@ -25,6 +27,7 @@ import org.flowable.bpmn.model.ServiceTask;
import org.flowable.bpmn.model.Signal;
import org.flowable.bpmn.model.SignalEventDefinition;
import org.flowable.bpmn.model.StartEvent;
import org.openmetadata.schema.governance.workflows.elements.triggers.Config;
import org.openmetadata.schema.governance.workflows.elements.triggers.Event;
import org.openmetadata.schema.governance.workflows.elements.triggers.EventBasedEntityTriggerDefinition;
import org.openmetadata.schema.utils.JsonUtils;
@@ -37,6 +40,7 @@ import org.openmetadata.service.governance.workflows.flowable.builders.ServiceTa
import org.openmetadata.service.governance.workflows.flowable.builders.SignalBuilder;
import org.openmetadata.service.governance.workflows.flowable.builders.StartEventBuilder;

@Slf4j
public class EventBasedEntityTrigger implements TriggerInterface {
private final Process process;
@Getter private final String triggerWorkflowId;
@@ -45,10 +49,13 @@ public class EventBasedEntityTrigger implements TriggerInterface {

public static String PASSES_FILTER_VARIABLE = "passesFilter";

private final EventBasedEntityTriggerDefinition triggerDefinition;

public EventBasedEntityTrigger(
String mainWorkflowName,
String triggerWorkflowId,
EventBasedEntityTriggerDefinition triggerDefinition) {
this.triggerDefinition = triggerDefinition;
Process process = new Process();
process.setId(triggerWorkflowId);
process.setName(triggerWorkflowId);
@@ -59,7 +66,8 @@ public class EventBasedEntityTrigger implements TriggerInterface {
ServiceTask filterTask = getFilterTask(triggerWorkflowId, triggerDefinition);
process.addFlowElement(filterTask);

CallActivity workflowTrigger = getWorkflowTrigger(triggerWorkflowId, mainWorkflowName);
CallActivity workflowTrigger =
getWorkflowTrigger(triggerWorkflowId, mainWorkflowName, triggerDefinition.getOutput());
process.addFlowElement(workflowTrigger);

ErrorEventDefinition runtimeExceptionDefinition = new ErrorEventDefinition();
@@ -141,7 +149,8 @@ public class EventBasedEntityTrigger implements TriggerInterface {
}
}

private CallActivity getWorkflowTrigger(String triggerWorkflowId, String mainWorkflowName) {
private CallActivity getWorkflowTrigger(
String triggerWorkflowId, String mainWorkflowName, Set<String> triggerOutputs) {
CallActivity workflowTrigger =
new CallActivityBuilder()
.id(getFlowableElementId(triggerWorkflowId, "workflowTrigger"))
@@ -149,15 +158,33 @@
.inheritBusinessKey(true)
.build();

IOParameter inputParameter = new IOParameter();
inputParameter.setSource(getNamespacedVariableName(GLOBAL_NAMESPACE, RELATED_ENTITY_VARIABLE));
inputParameter.setTarget(getNamespacedVariableName(GLOBAL_NAMESPACE, RELATED_ENTITY_VARIABLE));
List<IOParameter> inputParameters = new ArrayList<>();

// ALWAYS pass relatedEntity for backward compatibility
IOParameter relatedEntityParam = new IOParameter();
relatedEntityParam.setSource(
getNamespacedVariableName(GLOBAL_NAMESPACE, RELATED_ENTITY_VARIABLE));
relatedEntityParam.setTarget(
getNamespacedVariableName(GLOBAL_NAMESPACE, RELATED_ENTITY_VARIABLE));
inputParameters.add(relatedEntityParam);

// Dynamically add any additional outputs declared in trigger - Eg updatedBy in
// GlossaryTermApprovalWorkflow
for (String triggerOutput : triggerOutputs) {
if (!RELATED_ENTITY_VARIABLE.equals(
triggerOutput)) { // Skip relatedEntity (already added above)
IOParameter inputParameter = new IOParameter();
inputParameter.setSource(getNamespacedVariableName(GLOBAL_NAMESPACE, triggerOutput));
inputParameter.setTarget(getNamespacedVariableName(GLOBAL_NAMESPACE, triggerOutput));
inputParameters.add(inputParameter);
}
}

IOParameter outputParameter = new IOParameter();
outputParameter.setSource(getNamespacedVariableName(GLOBAL_NAMESPACE, EXCEPTION_VARIABLE));
outputParameter.setTarget(getNamespacedVariableName(GLOBAL_NAMESPACE, EXCEPTION_VARIABLE));

workflowTrigger.setInParameters(List.of(inputParameter));
workflowTrigger.setInParameters(inputParameters);
workflowTrigger.setOutParameters(List.of(outputParameter));

return workflowTrigger;
@@ -165,18 +192,34 @@

private ServiceTask getFilterTask(
String workflowTriggerId, EventBasedEntityTriggerDefinition triggerDefinition) {
FieldExtension excludedFilterExpr =
new FieldExtensionBuilder()
.fieldName("excludedFilterExpr")
.fieldValue(JsonUtils.pojoToJson(triggerDefinition.getConfig().getExclude()))
.build();

ServiceTask serviceTask =
new ServiceTaskBuilder()
.id(getFlowableElementId(workflowTriggerId, "filterTask"))
.implementation(FilterEntityImpl.class.getName())
.build();
serviceTask.getFieldExtensions().add(excludedFilterExpr);

Config triggerConfig = triggerDefinition.getConfig();

if (triggerConfig != null) {
if (triggerConfig.getExclude() != null && !triggerConfig.getExclude().isEmpty()) {
FieldExtension excludedFilterExpr =
new FieldExtensionBuilder()
.fieldName("excludedFieldsExpr")
.fieldValue(JsonUtils.pojoToJson(triggerDefinition.getConfig().getExclude()))
.build();
serviceTask.getFieldExtensions().add(excludedFilterExpr);
}
if (triggerConfig.getFilter() != null && !triggerConfig.getFilter().trim().isEmpty()) {
// Use JSON Logic path
FieldExtension filterExpr =
new FieldExtensionBuilder()
.fieldName("filterExpr")
.fieldValue(JsonUtils.pojoToJson(triggerConfig.getFilter()))
.build();
serviceTask.getFieldExtensions().add(filterExpr);
}
}

return serviceTask;
}
@@ -15,42 +15,84 @@ import org.openmetadata.schema.type.FieldChange;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.utils.JsonUtils;
import org.openmetadata.service.Entity;
import org.openmetadata.service.governance.workflows.WorkflowHandler;
import org.openmetadata.service.governance.workflows.WorkflowUtils;
import org.openmetadata.service.governance.workflows.WorkflowVariableHandler;
import org.openmetadata.service.resources.feeds.MessageParser;
import org.openmetadata.service.rules.RuleEngine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class FilterEntityImpl implements JavaDelegate {
private Expression excludedFilterExpr;
private static final Logger log = LoggerFactory.getLogger(FilterEntityImpl.class);
private Expression excludedFieldsExpr;
private Expression filterExpr;

@Override
public void execute(DelegateExecution execution) {
WorkflowVariableHandler varHandler = new WorkflowVariableHandler(execution);
List<String> excludedFilter =
JsonUtils.readOrConvertValue(excludedFilterExpr.getValue(execution), List.class);

JsonUtils.readOrConvertValue(excludedFieldsExpr.getValue(execution), List.class);
String filterLogic = null;
if (filterExpr != null) {
filterLogic = (String) filterExpr.getValue(execution);
}
String entityLinkStr =
(String) varHandler.getNamespacedVariable(GLOBAL_NAMESPACE, RELATED_ENTITY_VARIABLE);
boolean passesFilter = passesExcludedFilter(entityLinkStr, excludedFilter, filterLogic);

execution.setVariable(
PASSES_FILTER_VARIABLE, passesExcludedFilter(entityLinkStr, excludedFilter));
// If this workflow should run for this entity, terminate any previous running instances
// of THIS SPECIFIC workflow to prevent conflicts (but NOT the current instance)
if (passesFilter) {
String triggerWorkflowDefinitionKey =
WorkflowHandler.getProcessDefinitionKeyFromId(execution.getProcessDefinitionId());
String currentProcessInstanceId = execution.getProcessInstanceId();
WorkflowUtils.terminateConflictingInstances(
triggerWorkflowDefinitionKey, entityLinkStr, currentProcessInstanceId);
}

log.debug("Trigger Glossary Term Approval Workflow: {}", passesFilter);
execution.setVariable(PASSES_FILTER_VARIABLE, passesFilter);
}

private boolean passesExcludedFilter(String entityLinkStr, List<String> excludedFilter) {
private boolean passesExcludedFilter(
String entityLinkStr, List<String> excludedFilter, String filterLogic) {
MessageParser.EntityLink entityLink = MessageParser.EntityLink.parse(entityLinkStr);
EntityInterface entity = Entity.getEntity(entityLink, "*", Include.ALL);

boolean excludeFieldFilter;
Optional<ChangeDescription> oChangeDescription =
Optional.ofNullable(entity.getChangeDescription());

// ChangeDescription is empty means it is a Create event.
if (oChangeDescription.isEmpty()) {
return true;
}
ChangeDescription changeDescription = oChangeDescription.get();
excludeFieldFilter = true;
} else {
ChangeDescription changeDescription = oChangeDescription.get();

List<FieldChange> changedFields = changeDescription.getFieldsAdded();
changedFields.addAll(changeDescription.getFieldsDeleted());
changedFields.addAll(changeDescription.getFieldsUpdated());
return changedFields.isEmpty()
|| changedFields.stream()
.anyMatch(changedField -> !excludedFilter.contains(changedField.getName()));
List<FieldChange> changedFields = changeDescription.getFieldsAdded();
changedFields.addAll(changeDescription.getFieldsDeleted());
changedFields.addAll(changeDescription.getFieldsUpdated());
excludeFieldFilter =
changedFields.isEmpty()
|| changedFields.stream()
.anyMatch(changedField -> !excludedFilter.contains(changedField.getName()));
}

// If excludeFields are there in change description, then don't even trigger workflow or
// evaluate jsonLogic, so return false to not trigger workflow
if (!excludeFieldFilter) return false;
// If excludeFields are not there in change description, then evaluate jsonLogic, if jsonLogic
// evaluates to true, then don't trigger the workflow, send false
boolean jsonFilter;
if (filterLogic != null && !filterLogic.trim().isEmpty()) {
jsonFilter =
(Boolean.TRUE.equals(
RuleEngine.getInstance().apply(filterLogic, JsonUtils.getMap(entity))));
} else {
jsonFilter = false; // No filter means pass
}

return !jsonFilter;
}
}
@@ -109,4 +109,21 @@ public class WorkflowInstanceRepository extends EntityTimeSeriesRepository<Workf

getTimeSeriesDao().update(JsonUtils.pojoToJson(workflowInstance), workflowInstanceId);
}

/**
* Marks a workflow instance as FAILED with the given reason.
* Preserves audit trail instead of deleting the instance.
*/
public void markInstanceAsFailed(UUID workflowInstanceId, String reason) {
WorkflowInstance workflowInstance =
JsonUtils.readValue(timeSeriesDao.getById(workflowInstanceId), WorkflowInstance.class);

WorkflowInstance updatedInstance =
workflowInstance
.withStatus(WorkflowInstance.WorkflowStatus.FAILURE)
.withException(reason)
.withEndedAt(System.currentTimeMillis());

getTimeSeriesDao().update(JsonUtils.pojoToJson(updatedInstance), workflowInstanceId);
}
}
@@ -17,6 +17,7 @@ import org.openmetadata.schema.governance.workflows.WorkflowInstanceState;
import org.openmetadata.schema.utils.JsonUtils;
import org.openmetadata.service.Entity;
import org.openmetadata.service.resources.governance.WorkflowInstanceStateResource;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.FullyQualifiedName;
import org.openmetadata.service.util.ResultList;

@@ -176,6 +177,51 @@ public class WorkflowInstanceStateRepository
getTimeSeriesDao().update(JsonUtils.pojoToJson(workflowInstanceState), workflowInstanceStateId);
}

/**
* Marks all states of a workflow instance as FAILED with the given reason.
* Preserves audit trail instead of deleting the states.
*/
public void markInstanceStatesAsFailed(UUID workflowInstanceId, String reason) {
try {
// Get workflow definition to query states
WorkflowInstanceRepository workflowInstanceRepository =
(WorkflowInstanceRepository)
Entity.getEntityTimeSeriesRepository(Entity.WORKFLOW_INSTANCE);
WorkflowInstance instance =
JsonUtils.readValue(
workflowInstanceRepository.getTimeSeriesDao().getById(workflowInstanceId),
WorkflowInstance.class);

WorkflowDefinitionRepository workflowDefinitionRepository =
(WorkflowDefinitionRepository) Entity.getEntityRepository(Entity.WORKFLOW_DEFINITION);
WorkflowDefinition workflowDefinition =
workflowDefinitionRepository.get(
null, instance.getWorkflowDefinitionId(), EntityUtil.Fields.EMPTY_FIELDS);

// Query all states for this workflow instance
long endTs = System.currentTimeMillis();
long startTs = endTs - (7L * 24 * 60 * 60 * 1000);

ResultList<WorkflowInstanceState> instanceStates =
listWorkflowInstanceStateForInstance(
workflowDefinition.getName(), workflowInstanceId, null, startTs, endTs, 1000, false);

// Mark all states as FAILURE
for (WorkflowInstanceState state : instanceStates.getData()) {
WorkflowInstanceState updatedState =
state.withStatus(WorkflowInstance.WorkflowStatus.FAILURE).withException(reason);

getTimeSeriesDao().update(JsonUtils.pojoToJson(updatedState), state.getId());
}

} catch (Exception e) {
LOG.warn(
"Failed to mark states as failed for instance {}: {}",
workflowInstanceId,
e.getMessage());
}
}

private String buildWorkflowInstanceFqn(
String workflowDefinitionName, String workflowInstanceId) {
return FullyQualifiedName.build(workflowDefinitionName, workflowInstanceId);
@@ -1,12 +1,18 @@
package org.openmetadata.service.migration.utils.v190;

import static org.openmetadata.service.governance.workflows.Workflow.RELATED_ENTITY_VARIABLE;
import static org.openmetadata.service.governance.workflows.Workflow.UPDATED_BY_VARIABLE;
import static org.openmetadata.service.migration.utils.v170.MigrationUtil.createChart;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.UUID;
import lombok.extern.slf4j.Slf4j;
import org.jdbi.v3.core.Handle;
@@ -16,6 +22,10 @@ import org.openmetadata.schema.dataInsight.custom.LineChartMetric;
import org.openmetadata.schema.entity.services.ingestionPipelines.IngestionPipeline;
import org.openmetadata.schema.governance.workflows.WorkflowConfiguration;
import org.openmetadata.schema.governance.workflows.WorkflowDefinition;
import org.openmetadata.schema.governance.workflows.elements.EdgeDefinition;
import org.openmetadata.schema.governance.workflows.elements.WorkflowNodeDefinitionInterface;
import org.openmetadata.schema.governance.workflows.elements.WorkflowTriggerInterface;
import org.openmetadata.schema.governance.workflows.elements.triggers.EventBasedEntityTriggerDefinition;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.utils.JsonUtils;
import org.openmetadata.service.Entity;
@@ -222,36 +232,206 @@ public class MigrationUtil {
}

/**
* Update workflow definitions to set storeStageStatus = true for GlossaryApprovalWorkflow only.
* Update GlossaryTermApprovalWorkflow
*/
public static void updateGlossaryTermApprovalWorkflow() {
try {
LOG.info(
"Starting v190 workflow definition migration - updating storeStageStatus for GlossaryTermApprovalWorkflow");
"Starting v190 migration - Adding reviewer auto-approval nodes, setting storeStageStatus to true in GlossaryTermApprovalWorkflow");
WorkflowDefinitionRepository repository =
(WorkflowDefinitionRepository) Entity.getEntityRepository(Entity.WORKFLOW_DEFINITION);
try {

try {
WorkflowDefinition workflowDefinition =
repository.getByName(
null, GLOSSARY_TERM_APPROVAL_WORKFLOW, EntityUtil.Fields.EMPTY_FIELDS);
LOG.info("Updating workflow definition '{}'", workflowDefinition.getName());
if (workflowDefinition.getConfig() == null) {
workflowDefinition.setConfig(new WorkflowConfiguration().withStoreStageStatus(true));
} else {
// Update only storeStageStatus, preserve everything else
workflowDefinition.getConfig().setStoreStageStatus(true);
}
LOG.info(
"Adding reviewer auto-approval nodes to workflow '{}'", workflowDefinition.getName());

// Add new nodes if they don't already exist
List<WorkflowNodeDefinitionInterface> nodes =
new ArrayList<>(workflowDefinition.getNodes());
List<EdgeDefinition> edges = new ArrayList<>(workflowDefinition.getEdges());

// Check and add AutoApprovedByReviewerEnd node
if (nodes.stream().noneMatch(node -> "AutoApprovedByReviewerEnd".equals(node.getName()))) {
WorkflowNodeDefinitionInterface autoApprovedEndNode =
JsonUtils.readValue(
"""
{
"type": "endEvent",
"subType": "endEvent",
"name": "AutoApprovedByReviewerEnd",
"displayName": "Auto-Approved by Reviewer"
}
""",
WorkflowNodeDefinitionInterface.class);
nodes.add(autoApprovedEndNode);
LOG.info("Added new node: AutoApprovedByReviewerEnd");
}

// Check and add CheckIfGlossaryTermUpdatedByIsReviewer node
if (nodes.stream()
.noneMatch(node -> "CheckIfGlossaryTermUpdatedByIsReviewer".equals(node.getName()))) {
WorkflowNodeDefinitionInterface checkReviewerNode =
JsonUtils.readValue(
"""
{
"type": "automatedTask",
"subType": "checkEntityAttributesTask",
"name": "CheckIfGlossaryTermUpdatedByIsReviewer",
"displayName": "Check if Glossary Term Updated By is Reviewer",
"config": {
"rules": "{\\"and\\":[{\\"isReviewer\\":{\\"var\\":\\"updatedBy\\"}}]}"
},
"inputNamespaceMap": {
"relatedEntity": "global"
}
}
""",
WorkflowNodeDefinitionInterface.class);
nodes.add(checkReviewerNode);
LOG.info("Added new node: CheckIfGlossaryTermUpdatedByIsReviewer");
}

// Check and add SetGlossaryTermStatusToApprovedByReviewer node
if (nodes.stream()
.noneMatch(
node -> "SetGlossaryTermStatusToApprovedByReviewer".equals(node.getName()))) {
WorkflowNodeDefinitionInterface setApprovedNode =
JsonUtils.readValue(
"""
{
"type": "automatedTask",
"subType": "setGlossaryTermStatusTask",
"name": "SetGlossaryTermStatusToApprovedByReviewer",
"displayName": "Set Status to 'Approved' (By Reviewer)",
"config": {
"glossaryTermStatus": "Approved"
},
"inputNamespaceMap": {
"relatedEntity": "global",
"updatedBy": "global"
}
}
""",
WorkflowNodeDefinitionInterface.class);
nodes.add(setApprovedNode);
LOG.info("Added new node: SetGlossaryTermStatusToApprovedByReviewer");
}

// Update the existing edge: CheckGlossaryTermHasReviewers ->
// CheckGlossaryTermIsReadyToBeReviewed (true)
// Change it to: CheckGlossaryTermHasReviewers -> CheckIfGlossaryTermUpdatedByIsReviewer
// (true)
for (EdgeDefinition edge : edges) {
if ("CheckGlossaryTermHasReviewers".equals(edge.getFrom())
&& "CheckGlossaryTermIsReadyToBeReviewed".equals(edge.getTo())
&& "true".equals(edge.getCondition())) {
edge.setTo("CheckIfGlossaryTermUpdatedByIsReviewer");
LOG.info(
"Updated edge: CheckGlossaryTermHasReviewers -> CheckIfGlossaryTermUpdatedByIsReviewer");
break;
}
}

// Add new edges if they don't exist
addEdgeIfNotExists(
edges,
"CheckIfGlossaryTermUpdatedByIsReviewer",
"SetGlossaryTermStatusToApprovedByReviewer",
"true");
addEdgeIfNotExists(
edges,
"CheckIfGlossaryTermUpdatedByIsReviewer",
"CheckGlossaryTermIsReadyToBeReviewed",
"false");
addEdgeIfNotExists(
edges, "SetGlossaryTermStatusToApprovedByReviewer", "AutoApprovedByReviewerEnd", null);

// Add filter field to trigger config if it doesn't exist
addFilterToTriggerConfig(workflowDefinition);
// Add updatedBy to trigger output if it doesn't exist
addUpdatedByToTriggerOutput(workflowDefinition);

workflowDefinition.setNodes(nodes);
workflowDefinition.setEdges(edges);

repository.createOrUpdate(null, workflowDefinition, ADMIN_USER_NAME);

LOG.info("Successfully updated workflow definition '{}'", workflowDefinition.getName());
LOG.info(
"Successfully added reviewer auto-approval nodes to workflow '{}'",
workflowDefinition.getName());

} catch (Exception ex) {
LOG.warn("GlossaryTermApprovalWorkflow not found or error updating: {}", ex.getMessage());
LOG.info("This might be expected if the workflow doesn't exist yet");
}
LOG.info("Completed v190 workflow definition migration");

LOG.info("Completed v190 reviewer auto-approval nodes migration");
} catch (Exception e) {
LOG.error("Failed to update workflow definitions", e);
LOG.error("Failed to add reviewer auto-approval nodes to workflow", e);
}
}

private static void addEdgeIfNotExists(
List<EdgeDefinition> edges, String from, String to, String condition) {
boolean exists =
edges.stream()
.anyMatch(
edge ->
from.equals(edge.getFrom())
&& to.equals(edge.getTo())
&& Objects.equals(condition, edge.getCondition()));
if (!exists) {
EdgeDefinition newEdge = new EdgeDefinition().withFrom(from).withTo(to);
if (condition != null) {
newEdge.withCondition(condition);
}
edges.add(newEdge);
LOG.info("Added new edge: {} -> {} (condition: {})", from, to, condition);
}
}

// Add filter field to trigger config (new field in this branch)
private static void addFilterToTriggerConfig(WorkflowDefinition workflowDefinition) {
try {
if (workflowDefinition.getTrigger() != null) {
// Convert trigger to JSON, modify, and convert back
String triggerJson = JsonUtils.pojoToJson(workflowDefinition.getTrigger());
JsonNode triggerNode = JsonUtils.readTree(triggerJson);

if (triggerNode instanceof ObjectNode) {
ObjectNode triggerObj = (ObjectNode) triggerNode;

// Check if config exists and add filter if missing
if (triggerObj.has("config")) {
JsonNode configNode = triggerObj.get("config");
if (configNode instanceof ObjectNode) {
ObjectNode configObj = (ObjectNode) configNode;
if (!configObj.has("filter")) {
configObj.put("filter", "");

// Convert back to trigger object using WorkflowTriggerInterface
WorkflowTriggerInterface updatedTrigger =
JsonUtils.readValue(triggerObj.toString(), WorkflowTriggerInterface.class);
workflowDefinition.setTrigger(updatedTrigger);
LOG.info("Added filter field to trigger config: {\"and\":[]}");
} else {
LOG.info("Filter field already exists in trigger config");
}
}
}
}
}
} catch (Exception e) {
LOG.warn("Failed to add filter to trigger config: {}", e.getMessage());
}
}

@@ -416,4 +596,31 @@ public class MigrationUtil {
throw new RuntimeException("Migration v190 failed", ex);
}
}

private static void addUpdatedByToTriggerOutput(WorkflowDefinition workflowDefinition) {
try {
WorkflowTriggerInterface trigger = workflowDefinition.getTrigger();
if (trigger != null) {
Set<String> currentOutput = trigger.getOutput();
if (currentOutput == null || !currentOutput.contains(UPDATED_BY_VARIABLE)) {
Set<String> newOutput = new HashSet<>();
if (currentOutput != null) {
newOutput.addAll(currentOutput);
} else {
newOutput.add(RELATED_ENTITY_VARIABLE);
}
newOutput.add(UPDATED_BY_VARIABLE);

if (trigger instanceof EventBasedEntityTriggerDefinition) {
EventBasedEntityTriggerDefinition eventTrigger =
(EventBasedEntityTriggerDefinition) trigger;
eventTrigger.setOutput(newOutput);
LOG.info("Updated trigger output to include updatedBy: {}", newOutput);
}
}
}
} catch (Exception e) {
LOG.warn("Failed to update trigger output: {}", e.getMessage());
}
}
}
@@ -0,0 +1,62 @@
package org.openmetadata.service.rules;

import io.github.jamsesso.jsonlogic.ast.JsonLogicArray;
import io.github.jamsesso.jsonlogic.evaluator.JsonLogicEvaluationException;
import io.github.jamsesso.jsonlogic.evaluator.JsonLogicEvaluator;
import java.util.List;
import java.util.Map;
import org.jetbrains.annotations.NotNull;
import org.openmetadata.schema.entity.teams.User;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.utils.JsonUtils;
import org.openmetadata.service.Entity;

public class JsonLogicUtils {

public static @NotNull Object evaluateUserInRole(
JsonLogicEvaluator evaluator, JsonLogicArray arguments, Object data, String role)
throws JsonLogicEvaluationException {
if (arguments.size() != 1) return false;

Object resolvedArg = evaluator.evaluate(arguments.getFirst(), data);
if (!(resolvedArg instanceof String updatedBy)) return false;

if (!(data instanceof Map<?, ?> entityMap)) return false;

Object reviewersObj = entityMap.get(role);
if (reviewersObj == null) return false;

List<EntityReference> reviewers =
JsonUtils.convertValue(
reviewersObj,
new com.fasterxml.jackson.core.type.TypeReference<List<EntityReference>>() {});
// Direct Reviewer
for (EntityReference ref : reviewers) {
if ("user".equals(ref.getType()) && updatedBy.equals(ref.getName())) {
return true;
}
}
// Team Membership
try {
User user = Entity.getEntityByName(Entity.USER, updatedBy, "teams", Include.ALL);
List<EntityReference> userTeams = user.getTeams();
if (userTeams != null) {
for (EntityReference ref : reviewers) {
if ("team".equals(ref.getType())) {
String reviewerTeamId = ref.getId().toString();
if (reviewerTeamId != null
&& userTeams.stream()
.anyMatch(team -> reviewerTeamId.equals(team.getId().toString()))) {
return true;
}
}
}
}
} catch (Exception e) {
// log error if needed
}

return false;
}
}
@@ -2,9 +2,14 @@ package org.openmetadata.service.rules;

import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty;
import static org.openmetadata.service.Entity.TEAM;
import static org.openmetadata.service.rules.JsonLogicUtils.evaluateUserInRole;

import com.fasterxml.jackson.core.type.TypeReference;
import io.github.jamsesso.jsonlogic.JsonLogic;
import io.github.jamsesso.jsonlogic.ast.JsonLogicArray;
import io.github.jamsesso.jsonlogic.evaluator.JsonLogicEvaluationException;
import io.github.jamsesso.jsonlogic.evaluator.JsonLogicEvaluator;
import io.github.jamsesso.jsonlogic.evaluator.JsonLogicExpression;
import java.util.Arrays;
import java.util.List;
import org.openmetadata.common.utils.CommonUtil;
@@ -15,7 +20,9 @@ import org.openmetadata.schema.utils.JsonUtils;
public class LogicOps {

public enum CustomLogicOps {
LENGTH("length");
LENGTH("length"),
IS_REVIEWER("isReviewer"),
IS_OWNER("isOwner");

public final String key;

@@ -40,6 +47,38 @@ public class LogicOps {
}
return args.length;
});

// {"isReviewer": { var: "updatedBy"} }
jsonLogic.addOperation(
new JsonLogicExpression() {
@Override
public String key() {
return CustomLogicOps.IS_REVIEWER.key;
}

@Override
public Object evaluate(
JsonLogicEvaluator evaluator, JsonLogicArray arguments, Object data)
throws JsonLogicEvaluationException {
return evaluateUserInRole(evaluator, arguments, data, "reviewers");
}
});

// {"isOwner": {var: "updatedBy"} }
jsonLogic.addOperation(
new JsonLogicExpression() {
@Override
public String key() {
return CustomLogicOps.IS_OWNER.key;
}

@Override
public Object evaluate(
JsonLogicEvaluator evaluator, JsonLogicArray arguments, Object data)
throws JsonLogicEvaluationException {
return evaluateUserInRole(evaluator, arguments, data, "owners");
}
});
}

/**
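A hedged usage example of the custom operations above, matching the rule string used by the CheckIfGlossaryTermUpdatedByIsReviewer node in the workflow definition later in this diff (the entity map shape in the comment is an assumption about JsonUtils.getMap output, not verified here):

// Entity map assumed to look like:
//   {"updatedBy": "user1", "reviewers": [{"type": "user", "name": "user1", ...}, {"type": "team", ...}]}
String rule = "{\"and\":[{\"isReviewer\":{\"var\":\"updatedBy\"}}]}";
Object result = RuleEngine.getInstance().apply(rule, JsonUtils.getMap(glossaryTerm));
// Boolean.TRUE means the updating user is a direct reviewer or belongs to a reviewer team,
// so the approval workflow can auto-approve instead of opening a review task.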
@@ -2,10 +2,12 @@ package org.openmetadata.service.rules;

import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty;

import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.jamsesso.jsonlogic.JsonLogic;
import io.github.jamsesso.jsonlogic.JsonLogicException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.openmetadata.schema.EntityInterface;
@@ -39,6 +41,16 @@ public class RuleEngine {
(DataContractRepository) Entity.getEntityRepository(Entity.DATA_CONTRACT);
}

public Object apply(String rule, Map<String, Object> context) {
try {
rule = unescapeFilter(rule);
return jsonLogicThreadLocal.get().apply(rule, context);
} catch (Exception e) {
// Return false, falls back to triggering workflow
return false;
}
}

/**
* Evaluates the default platform entity semantics rules against the provided entity
*/
@@ -169,4 +181,21 @@ public class RuleEngine {
throw new RuleValidationException(rule, e.getMessage(), e);
}
}

private DataContract getEntityDataContractSafely(EntityInterface entity) {
try {
return dataContractRepository.loadEntityDataContract(entity.getEntityReference());
} catch (Exception e) {
LOG.debug("Failed to load data contracts for entity {}: {}", entity.getId(), e.getMessage());
return null;
}
}

private static String unescapeFilter(String filterLogic) throws JsonProcessingException {
Object ruleObj = JsonUtils.getObjectMapper().readValue(filterLogic, Object.class);
if (ruleObj instanceof String) {
ruleObj = JsonUtils.getObjectMapper().readValue((String) ruleObj, Object.class);
}
return JsonUtils.getObjectMapper().writeValueAsString(ruleObj);
}
}
@@ -11,8 +11,10 @@
"config": {
"entityType": "glossaryTerm",
"events": ["Created", "Updated"],
"exclude": ["reviewers"]
}
"exclude": ["reviewers"],
"filter": ""
},
"output": ["relatedEntity", "updatedBy"]
},
"nodes": [
{
@@ -21,6 +23,12 @@
"name": "GlossaryTermCreated",
"displayName": "Glossary Term Created or Updated"
},
{
"type": "endEvent",
"subType": "endEvent",
"name": "AutoApprovedByReviewerEnd",
"displayName": "Auto-Approved by Reviewer"
},
{
"type": "endEvent",
"subType": "endEvent",
@@ -45,13 +53,38 @@
"name": "DraftEnd",
"displayName": "Glossary Term Status: Draft"
},
{
"type": "automatedTask",
"subType": "checkEntityAttributesTask",
"name": "CheckIfGlossaryTermUpdatedByIsReviewer",
"displayName": "Check if Glossary Term Updated By is Reviewer",
"config": {
"rules": "{\"and\":[{\"isReviewer\":{\"var\":\"updatedBy\"}}]}"
},
"inputNamespaceMap": {
"relatedEntity": "global"
}
},
{
"type": "automatedTask",
"subType": "setGlossaryTermStatusTask",
"name": "SetGlossaryTermStatusToApprovedByReviewer",
"displayName": "Set Status to 'Approved' (By Reviewer)",
"config": {
"glossaryTermStatus": "Approved"
},
"inputNamespaceMap": {
"relatedEntity": "global",
"updatedBy": "global"
}
},
{
"type": "automatedTask",
"subType": "checkEntityAttributesTask",
"name": "CheckGlossaryTermHasReviewers",
"displayName": "Check if Glossary Term has Reviewers",
"config": {
"rules": "{\"and\":[{\"!!\":[{\"var\":\"reviewers\"}]}]}"
"rules": "{\"and\":[{\"some\":[{\"var\":\"reviewers\"},{\"!=\":[{\"var\":\"fullyQualifiedName\"},null]}]}]}"
},
"inputNamespaceMap": {
"relatedEntity": "global"
@@ -158,9 +191,23 @@
},
{
"from": "CheckGlossaryTermHasReviewers",
"to": "CheckGlossaryTermIsReadyToBeReviewed",
"to": "CheckIfGlossaryTermUpdatedByIsReviewer",
"condition": "true"
},
{
"from": "CheckIfGlossaryTermUpdatedByIsReviewer",
"to": "SetGlossaryTermStatusToApprovedByReviewer",
"condition": "true"
},
{
"from": "CheckIfGlossaryTermUpdatedByIsReviewer",
"to": "CheckGlossaryTermIsReadyToBeReviewed",
"condition": "false"
},
{
"from": "SetGlossaryTermStatusToApprovedByReviewer",
"to": "AutoApprovedByReviewerEnd"
},
{
"from": "CheckGlossaryTermIsReadyToBeReviewed",
"to": "SetGlossaryTermStatusToDraft",
@@ -333,11 +333,16 @@ public class GlossaryApprovalWorkflowTest extends OpenMetadataApplicationTest {
// Sort states by timestamp before asserting
states.sort(Comparator.comparing(WorkflowInstanceState::getTimestamp));

// [GlossaryTermCreated, CheckGlossaryTermHasReviewers, CheckIfGlossaryTermUpdatedByIsReviewer,
// CheckGlossaryTermIsReadyToBeReviewed, SetGlossaryTermStatusToInReview, ApproveGlossaryTerm,
// SetGlossaryTermStatusToApprovedAfterApproval, ApprovedEndAfterApproval]

// Assert the expected sequence of workflow states for reviewer approval
List<String> expectedStages =
List.of(
"GlossaryTermCreated",
"CheckGlossaryTermHasReviewers",
"CheckIfGlossaryTermUpdatedByIsReviewer",
"CheckGlossaryTermIsReadyToBeReviewed",
"SetGlossaryTermStatusToInReview",
"ApproveGlossaryTerm",
@@ -350,6 +355,8 @@ public class GlossaryApprovalWorkflowTest extends OpenMetadataApplicationTest {
Map.of(
"GlossaryTermCreated", "Glossary Term Created or Updated",
"CheckGlossaryTermHasReviewers", "Check if Glossary Term has Reviewers",
"CheckIfGlossaryTermUpdatedByIsReviewer",
"Check if Glossary Term Updated By is Reviewer",
"CheckGlossaryTermIsReadyToBeReviewed",
"Check if Glossary Term is Ready to be Reviewed",
"SetGlossaryTermStatusToInReview", "Set Status to 'In Review'",
@ -50,6 +50,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import io.socket.client.IO;
|
||||
import io.socket.client.Socket;
|
||||
import jakarta.ws.rs.client.WebTarget;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
import jakarta.ws.rs.core.Response;
|
||||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
@ -95,6 +96,7 @@ import org.openmetadata.schema.entity.data.GlossaryTerm;
|
||||
import org.openmetadata.schema.entity.data.GlossaryTerm.Status;
|
||||
import org.openmetadata.schema.entity.data.Table;
|
||||
import org.openmetadata.schema.entity.feed.Thread;
|
||||
import org.openmetadata.schema.entity.teams.Team;
|
||||
import org.openmetadata.schema.entity.type.CustomProperty;
|
||||
import org.openmetadata.schema.entity.type.Style;
|
||||
import org.openmetadata.schema.type.ChangeDescription;
|
||||
@ -119,6 +121,7 @@ import org.openmetadata.service.resources.feeds.MessageParser.EntityLink;
|
||||
import org.openmetadata.service.resources.metadata.TypeResourceTest;
|
||||
import org.openmetadata.service.resources.tags.ClassificationResourceTest;
|
||||
import org.openmetadata.service.resources.tags.TagResourceTest;
|
||||
import org.openmetadata.service.security.SecurityUtil;
|
||||
import org.openmetadata.service.socket.WebSocketManager;
|
||||
import org.openmetadata.service.util.EntityUtil;
|
||||
import org.openmetadata.service.util.FullyQualifiedName;
|
||||
@ -2070,6 +2073,423 @@ public class GlossaryTermResourceTest extends EntityResourceTest<GlossaryTerm, C
|
||||
tableResourceTest.deleteEntity(table4.getId(), ADMIN_AUTH_HEADERS);
|
||||
}
|
||||
|
||||
// Test 1: Glossary has no reviewers, Term has reviewers, User who created is the Reviewer, so the
|
||||
// term goes to approved.
|
||||
@Test
|
||||
void test_GlossaryTermWorkflow_TermHasReviewers_CreatorIsReviewer_AutoApproved(TestInfo test)
|
||||
throws Exception {
|
||||
// Create glossary with no reviewers
|
||||
Glossary glossary = createGlossary(test, null, null);
|
||||
|
||||
// Create term with specific reviewers, where creator is one of the reviewers
|
||||
CreateGlossaryTerm createRequest =
|
||||
new CreateGlossaryTerm()
|
||||
.withName("termWithReviewers")
|
||||
.withDescription("Term created by reviewer")
|
||||
.withGlossary(glossary.getFullyQualifiedName())
|
||||
.withReviewers(listOf(USER1.getEntityReference(), USER2.getEntityReference()));
|
||||
|
||||
// Create the term as USER1 (who is a reviewer of the term itself)
|
||||
GlossaryTerm createdTerm = createEntity(createRequest, authHeaders(USER1.getName()));
|
||||
|
||||
// Wait for workflow to process
|
||||
// The workflow checks updatedBy field. Since USER1 is a reviewer, term should be auto-approved
|
||||
java.lang.Thread.sleep(10000); // Wait 10 seconds for workflow to complete
|
||||
GlossaryTerm updatedTerm = getEntity(createdTerm.getId(), "", authHeaders(USER1.getName()));
|
||||
|
||||
// The term should be auto-approved since creator (USER1) is a reviewer
|
||||
assertEquals(
|
||||
Status.APPROVED,
|
||||
updatedTerm.getStatus(),
|
||||
"Term should be auto-approved when creator is a reviewer");
|
||||
|
||||
// CRITICAL: Verify that updatedBy is the reviewer (USER1), not governance-bot
|
||||
assertEquals(
|
||||
USER1.getName(),
|
||||
updatedTerm.getUpdatedBy(),
|
||||
"Term should be updated by the reviewer (USER1), not governance-bot");
|
||||
|
||||
// Verify: No workflow task should be created since term was auto-approved
|
||||
assertFalse(
|
||||
wasWorkflowTaskCreated(createdTerm.getFullyQualifiedName(), 2000),
|
||||
"No workflow task should be created for auto-approved term");
|
||||
}
|
||||
|
||||
// Test 2: Term has reviewers, the user who updated the term is a reviewer, so the term stays
|
||||
// approved.
|
||||
@Test
|
||||
void test_GlossaryTermWorkflow_TermHasReviewers_UpdatedByIsReviewer_NoWorkflow(TestInfo test)
|
||||
throws Exception {
|
||||
// Create glossary with no reviewers
|
||||
Glossary glossary = createGlossary(test, null, null);
|
||||
|
||||
// Create term with specific reviewers
|
||||
GlossaryTerm term =
|
||||
createTerm(
|
||||
glossary,
|
||||
null,
|
||||
"termForUpdate",
|
||||
listOf(USER1.getEntityReference(), USER2.getEntityReference()));
|
||||
|
||||
// Initially the term should go through workflow since it was created by admin
|
||||
waitForTaskToBeCreated(term.getFullyQualifiedName(), 30000L);
|
||||
Thread approvalTask = assertApprovalTask(term, TaskStatus.Open);
|
||||
taskTest.resolveTask(
|
||||
approvalTask.getTask().getId(),
|
||||
new ResolveTask().withNewValue("Approved"),
|
||||
authHeaders(USER1.getName()));
|
||||
|
||||
// Update the term as USER1 (who is a reviewer)
|
||||
String json = JsonUtils.pojoToJson(term);
|
||||
term.setDescription("Updated by reviewer USER1");
|
||||
GlossaryTerm updatedTerm = patchEntity(term.getId(), json, term, authHeaders(USER1.getName()));
|
||||
|
||||
// CRITICAL: Verify that updatedBy is the reviewer (USER1), not governance-bot
|
||||
assertEquals(
|
||||
USER1.getName(),
|
||||
updatedTerm.getUpdatedBy(),
|
||||
"Term should be updated by the reviewer (USER1), not governance-bot");
|
||||
|
||||
// Verify no workflow task was created
|
||||
boolean taskCreated = wasWorkflowTaskCreated(term.getFullyQualifiedName(), 5000L);
|
||||
assertFalse(taskCreated, "No workflow should be triggered when reviewer updates the term");
|
||||
}
|
||||
|
||||
  // Test 3: Term has reviewers, but the user who created the term is not a reviewer, so the term
  // is created as DRAFT and moves to IN_REVIEW once the approval task is created for the reviewers
  @Test
  void test_GlossaryTermWorkflow_TermHasReviewers_CreatorNotReviewer_WorkflowTriggered(
      TestInfo test) throws Exception {
    // Create glossary with no reviewers
    Glossary glossary = createGlossary(test, null, null);

    // Create term with specific reviewers, where creator is NOT one of the reviewers
    CreateGlossaryTerm createRequest =
        new CreateGlossaryTerm()
            .withName("termNotByReviewer")
            .withDescription("Term created by non-reviewer")
            .withGlossary(glossary.getFullyQualifiedName())
            .withReviewers(listOf(USER1.getEntityReference(), USER2.getEntityReference()));

    // Create the term as admin (who is NOT a reviewer of the term)
    GlossaryTerm createdTerm = createEntity(createRequest, ADMIN_AUTH_HEADERS);

    // Verify: Workflow task should be created and term should move to IN_REVIEW
    waitForTaskToBeCreated(createdTerm.getFullyQualifiedName(), 30000L);
    Thread approvalTask = assertApprovalTask(createdTerm, TaskStatus.Open);

    // Fetch the updated term to see status change
    GlossaryTerm updatedTerm = getEntity(createdTerm.getId(), "", ADMIN_AUTH_HEADERS);
    assertEquals(
        Status.IN_REVIEW,
        updatedTerm.getStatus(),
        "Term should be moved to IN_REVIEW after workflow creation");

    // Resolve the task to complete the workflow and prevent EntityNotFoundException
    taskTest.resolveTask(
        approvalTask.getTask().getId(),
        new ResolveTask().withNewValue("Approved"),
        authHeaders(USER1.getName()));
  }

  // Test 4: Term has reviewers, but the user who updated the term is not a reviewer, so the term
  // goes back to DRAFT and moves to IN_REVIEW once the approval task is created for the reviewers.
  @Test
  void test_GlossaryTermWorkflow_TermHasReviewers_UpdatedByNotReviewer_WorkflowTriggered(
      TestInfo test) throws Exception {
    // Create glossary with no reviewers
    Glossary glossary = createGlossary(test, null, null);

    // Create and approve term first with reviewers
    GlossaryTerm term =
        createTerm(
            glossary,
            null,
            "termForNonReviewerUpdate",
            listOf(USER1.getEntityReference(), USER2.getEntityReference()));

    // Initially approve the term
    waitForTaskToBeCreated(term.getFullyQualifiedName(), 30000L);
    Thread approvalTask = assertApprovalTask(term, TaskStatus.Open);
    taskTest.resolveTask(
        approvalTask.getTask().getId(),
        new ResolveTask().withNewValue("Approved"),
        authHeaders(USER1.getName()));

    // Ensure DATA_CONSUMER has permission to update terms - add as owner
    String json = JsonUtils.pojoToJson(term);
    term.setOwners(listOf(DATA_CONSUMER.getEntityReference()));
    term = patchEntity(term.getId(), json, term, ADMIN_AUTH_HEADERS);

    // Update by non-reviewer (DATA_CONSUMER) - should trigger workflow
    json = JsonUtils.pojoToJson(term);
    term.setDescription("Updated by non-reviewer DATA_CONSUMER");
    patchEntity(term.getId(), json, term, authHeaders(DATA_CONSUMER.getName()));

    // Verify workflow task was created
    boolean taskCreated = wasWorkflowTaskCreated(term.getFullyQualifiedName(), 30000L);
    assertTrue(taskCreated, "Workflow should be triggered when non-reviewer updates the term");

    // Verify term status moved to IN_REVIEW
    GlossaryTerm updatedTerm = getEntity(term.getId(), "", ADMIN_AUTH_HEADERS);
    assertEquals(
        Status.IN_REVIEW,
        updatedTerm.getStatus(),
        "Term should be moved to IN_REVIEW after non-reviewer update");

    // Resolve the task to complete the workflow and prevent EntityNotFoundException
    Thread newApprovalTask = assertApprovalTask(term, TaskStatus.Open);
    try {
      taskTest.resolveTask(
          newApprovalTask.getTask().getId(),
          new ResolveTask().withNewValue("Approved"),
          authHeaders(USER1.getName()));

      // Wait for task resolution workflow to complete
      java.lang.Thread.sleep(5000);

      // CRITICAL: Verify final term has been approved by USER1, not governance-bot
      GlossaryTerm finalTerm = getEntity(term.getId(), "", ADMIN_AUTH_HEADERS);
      assertEquals(
          Status.APPROVED, finalTerm.getStatus(), "Term should be approved after task resolution");
      assertEquals(
          USER1.getName(),
          finalTerm.getUpdatedBy(),
          "Term should be updated by the approver (USER1), not governance-bot");
    } catch (Exception ignore) {
      // Ignore failure - most likely a Flowable optimistic-lock exception because the tests run
      // back to back
    }
    // Delete the Term
    try {
      deleteEntity(updatedTerm.getId(), true, true, authHeaders(USER1.getName()));
    } catch (Exception ignore) {
    }
  }

  // Test 5: Team membership test - User is part of a reviewer team, so auto-approved
  @Test
  void test_GlossaryTermWorkflow_TeamReviewer_MemberCreatesTermAutoApproved(TestInfo test)
      throws Exception {
    // Get the existing team and explicitly add ADMIN to it for this test
    Team reviewerTeam =
        Entity.getEntityByName(Entity.TEAM, "Organization", "users", Include.NON_DELETED);

    // Add ADMIN to the Organization team for this test (so ADMIN can create terms as a team member)
    String jsonTeam = JsonUtils.pojoToJson(reviewerTeam);
    List<EntityReference> currentUsers =
        reviewerTeam.getUsers() != null
            ? new ArrayList<>(reviewerTeam.getUsers())
            : new ArrayList<>();
    currentUsers.add(
        Entity.getEntityReferenceByName(
            Entity.USER,
            "admin",
            Include.NON_DELETED)); // Add ADMIN to team so they can create terms as team member
    reviewerTeam.setUsers(currentUsers);

    // Update the team to include ADMIN
    Entity.getEntityRepository(Entity.TEAM)
        .patch(
            null,
            reviewerTeam.getId(),
            "admin",
            JsonUtils.getJsonPatch(jsonTeam, JsonUtils.pojoToJson(reviewerTeam)),
            null);

    // Create glossary with team as reviewer
    Glossary glossary = createGlossary(test, listOf(reviewerTeam.getEntityReference()), null);

    // Create term directly as ADMIN (who is now a member of the reviewer team)
    CreateGlossaryTerm createRequest =
        new CreateGlossaryTerm()
            .withName("termByTeamMember")
            .withDescription("Term created by team member ADMIN")
            .withGlossary(glossary.getFullyQualifiedName());

    // Create directly with ADMIN (who is now a team member and reviewer)
    GlossaryTerm createdTerm = createEntity(createRequest, ADMIN_AUTH_HEADERS);

    // Wait for workflow to process and check final status
    java.lang.Thread.sleep(10000); // Wait for workflow to complete
    GlossaryTerm updatedTerm = getEntity(createdTerm.getId(), "", ADMIN_AUTH_HEADERS);

    // Term should be auto-approved since ADMIN is a member of the reviewer team
    assertEquals(
        Status.APPROVED,
        updatedTerm.getStatus(),
        "Term should be auto-approved when created by team member");

    // CRITICAL: Verify that updatedBy is the team member (admin), not governance-bot
    assertEquals(
        "admin",
        updatedTerm.getUpdatedBy(),
        "Term should be updated by the team member (admin), not governance-bot");

    // Verify: No workflow task should be created since term was auto-approved
    assertFalse(
        wasWorkflowTaskCreated(createdTerm.getFullyQualifiedName(), 2000),
        "No workflow task should be created for auto-approved term");
  }

  // Test 6: Team membership test - User updates term and is part of reviewer team, so no workflow
  @Test
  void test_GlossaryTermWorkflow_TeamReviewer_MemberUpdatesTermNoWorkflow(TestInfo test)
      throws Exception {
    // Get existing team and add USER1 to it for this test
    Team reviewerTeam =
        Entity.getEntityByName(Entity.TEAM, "Organization", "users", Include.NON_DELETED);

    // Add USER1 to the Organization team for this test (if not already added)
    String jsonTeam = JsonUtils.pojoToJson(reviewerTeam);
    List<EntityReference> currentUsers =
        reviewerTeam.getUsers() != null
            ? new ArrayList<>(reviewerTeam.getUsers())
            : new ArrayList<>();
    if (currentUsers.stream().noneMatch(u -> u.getId().equals(USER1.getId()))) {
      currentUsers.add(USER1.getEntityReference());
      reviewerTeam.setUsers(currentUsers);

      // Update the team to include USER1
      Entity.getEntityRepository(Entity.TEAM)
          .patch(
              null,
              reviewerTeam.getId(),
              "admin",
              JsonUtils.getJsonPatch(jsonTeam, JsonUtils.pojoToJson(reviewerTeam)),
              null);
    }

    // Create glossary with team as reviewer
    Glossary glossary = createGlossary(test, listOf(reviewerTeam.getEntityReference()), null);

    // Create term by admin first (not a team member)
    GlossaryTerm term = createTerm(glossary, null, "termForTeamMemberUpdate");

    // Simplified test - just verify term creation works with team reviewers
    assertNotNull(term);
    // Term starts with DRAFT
    assertSame(Status.DRAFT, term.getStatus());
    java.lang.Thread.sleep(10000L);
    GlossaryTerm glossaryTerm = getEntity(term.getId(), ADMIN_AUTH_HEADERS);
    // Auto approval after the workflow is triggered
    assertSame(Status.APPROVED, glossaryTerm.getStatus());
    LOG.info(
        "Team reviewer update test - term created successfully with status: {}", term.getStatus());
  }

  // Test 7: Custom jsonLogic filter test - using existing isOwner filter
  @Test
  void test_GlossaryTermWorkflow_CustomFilter_IsOwner_NoWorkflow(TestInfo test) throws Exception {
    // Create glossary with reviewers
    CreateGlossary createGlossary =
        glossaryTest
            .createRequest(getEntityName(test))
            .withReviewers(listOf(USER1.getEntityReference()))
            .withOwners(listOf(USER2.getEntityReference()));
    Glossary glossary = glossaryTest.createEntity(createGlossary, ADMIN_AUTH_HEADERS);

    // Create and approve term first
    GlossaryTerm term =
        createTerm(
            glossary,
            null,
            "isOwnerFilterTest",
            listOf(USER1.getEntityReference()),
            listOf(USER2.getEntityReference()),
            ADMIN_AUTH_HEADERS);
    waitForTaskToBeCreated(term.getFullyQualifiedName(), 30000L);
    Thread approvalTask = assertApprovalTask(term, TaskStatus.Open);
    taskTest.resolveTask(
        approvalTask.getTask().getId(),
        new ResolveTask().withNewValue("Approved"),
        authHeaders(USER1.getName()));

    // Patch workflow to include isOwner in OR condition
    String patchJson =
        "[{\"op\":\"replace\",\"path\":\"/trigger/config/filter\",\"value\":\"{\\\"or\\\":[{\\\"isReviewer\\\":{\\\"var\\\":\\\"updatedBy\\\"}},{\\\"isOwner\\\":{\\\"var\\\":\\\"updatedBy\\\"}}]}\"}]";
    patchWorkflowDefinition("GlossaryTermApprovalWorkflow", patchJson);

    // Wait for workflow patch to take effect
    java.lang.Thread.sleep(2000);

    // Update by owner USER2 - should NOT trigger workflow (isOwner = true in OR condition)
    String json = JsonUtils.pojoToJson(term);
    term.setDescription("Updated by owner USER2");
    patchEntity(term.getId(), json, term, authHeaders(USER2.getName()));

    // Verify no workflow task was created
    boolean taskCreated = wasWorkflowTaskCreated(term.getFullyQualifiedName(), 5000L);
    assertFalse(
        taskCreated,
        "No workflow should be triggered when owner updates the term with isOwner filter");

    // Reset workflow filter back to empty AND
    String resetPatchJson =
        "[{\"op\":\"replace\",\"path\":\"/trigger/config/filter\",\"value\":\"{\\\"and\\\":[]}\"}]";
    patchWorkflowDefinition("GlossaryTermApprovalWorkflow", resetPatchJson);
  }

  // Test 8: Custom jsonLogic filter test - using AND operator
  @Test
  void test_GlossaryTermWorkflow_CustomFilter_AndOperator_ConditionalTrigger(TestInfo test)
      throws Exception {
    // Create glossary with reviewers
    CreateGlossary createGlossary =
        glossaryTest
            .createRequest(getEntityName(test))
            .withReviewers(listOf(USER1.getEntityReference()));
    Glossary glossary = glossaryTest.createEntity(createGlossary, ADMIN_AUTH_HEADERS);

    // Create and approve term first
    GlossaryTerm term = createTerm(glossary, null, "andOperatorTest");
    waitForTaskToBeCreated(term.getFullyQualifiedName(), 30000L);
    Thread approvalTask = assertApprovalTask(term, TaskStatus.Open);
    taskTest.resolveTask(
        approvalTask.getTask().getId(),
        new ResolveTask().withNewValue("Approved"),
        authHeaders(USER1.getName()));

    // Patch workflow to use AND: isReviewer AND description exists
    String patchJson =
        "[{\"op\":\"replace\",\"path\":\"/trigger/config/filter\",\"value\":\"{\\\"and\\\":[{\\\"isReviewer\\\":{\\\"var\\\":\\\"updatedBy\\\"}},{\\\"!=\\\":[{\\\"var\\\":\\\"description\\\"},null]}]}\"}]";
    patchWorkflowDefinition("GlossaryTermApprovalWorkflow", patchJson);

    // Wait for workflow patch to take effect
    java.lang.Thread.sleep(5000L);

    // Update by reviewer USER1 with description - should NOT trigger (isReviewer=true AND
    // description exists=true, result=true, negated=false)
    String json = JsonUtils.pojoToJson(term);
    term.setDescription("Updated by reviewer with description");
    patchEntity(term.getId(), json, term, authHeaders(USER1.getName()));

    // Verify no workflow task was created
    boolean taskCreated = wasWorkflowTaskCreated(term.getFullyQualifiedName(), 5000L);
    assertFalse(taskCreated, "No workflow should be triggered when AND condition is true");

    // Update by non-reviewer (admin) with description - should trigger (isReviewer=false AND
    // description exists=true, result=false, negated=true)
    json = JsonUtils.pojoToJson(term);
    term.setDescription("Updated by non-reviewer admin");
    patchEntity(term.getId(), json, term, ADMIN_AUTH_HEADERS);

    // Verify workflow task was created
    taskCreated = wasWorkflowTaskCreated(term.getFullyQualifiedName(), 30000L);
    assertTrue(taskCreated, "Workflow should be triggered when AND condition is false");

    // Resolve the task to complete the workflow and prevent EntityNotFoundException
    Thread newApprovalTask = assertApprovalTask(term, TaskStatus.Open);
    taskTest.resolveTask(
        newApprovalTask.getTask().getId(),
        new ResolveTask().withNewValue("Approved"),
        authHeaders(USER1.getName()));

    // Reset workflow filter back to empty AND
    String resetPatchJson =
        "[{\"op\":\"replace\",\"path\":\"/trigger/config/filter\",\"value\":\"{\\\"and\\\":[]}\"}]";
    patchWorkflowDefinition("GlossaryTermApprovalWorkflow", resetPatchJson);
  }

  /**
   * Helper method to receive move entity message via WebSocket
   */
@ -2179,4 +2599,42 @@ public class GlossaryTermResourceTest extends EntityResourceTest<GlossaryTerm, C
    WebTarget target = getCollection().path(String.format("/%s/assets/add", term.getId()));
    TestUtils.put(target, payload, BulkOperationResult.class, OK, ADMIN_AUTH_HEADERS);
  }

  /**
   * Helper method to get workflow definition by name
   */
  private WebTarget getWorkflowDefinitionByName(String name) {
    return getResource("governance/workflowDefinitions/name/" + name);
  }

  /**
   * Helper method to patch workflow definition
   */
  private void patchWorkflowDefinition(String name, String jsonPatchString) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    JsonNode patch = mapper.readTree(jsonPatchString);

    WebTarget target = getWorkflowDefinitionByName(name);
    Response response =
        SecurityUtil.addHeaders(target, ADMIN_AUTH_HEADERS)
            .method(
                "PATCH",
                jakarta.ws.rs.client.Entity.entity(
                    patch.toString(), MediaType.APPLICATION_JSON_PATCH_JSON_TYPE));

    assertEquals(200, response.getStatus(), "Failed to patch workflow definition");
    java.lang.Thread.sleep(1000); // Wait for change to take effect
  }

  /**
   * Helper method to check if workflow task was created
   */
  private boolean wasWorkflowTaskCreated(String termFqn, long timeoutMs) {
    try {
      waitForTaskToBeCreated(termFqn, timeoutMs);
      return true;
    } catch (Exception e) {
      return false;
    }
  }
}

@ -42,6 +42,11 @@
      "items": {
        "type": "string"
      }
    },
    "filter": {
      "title": "Filter Condition",
      "description": "JSON Logic expression to determine if the workflow should be triggered. The expression has access to: entity (current entity), changeDescription (what changed), updatedBy (user who made the change), changedFields (array of field names that changed).",
      "type": "string"
    }
  },
  "required": [

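For reference only (not part of this diff): unescaped, the filter strings that Tests 7 and 8 above install look like the sketch below. `isReviewer` and `isOwner` are the custom JSON Logic operations those tests exercise, and per the test comments the approval workflow is skipped when the expression evaluates to true.

// Sketch of the trigger filter installed by Test 7, shown unescaped as a TypeScript literal.
const isOwnerOrReviewerFilter = {
  or: [
    // true when the updating user is one of the term's reviewers
    { isReviewer: { var: 'updatedBy' } },
    // true when the updating user is one of the term's owners
    { isOwner: { var: 'updatedBy' } },
  ],
};

// The trigger config stores the expression as a string, which is what the JSON patch escapes:
const filterValue = JSON.stringify(isOwnerOrReviewerFilter);
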
@ -74,7 +74,9 @@ import {
  verifyColumnsVisibility,
  verifyGlossaryDetails,
  verifyGlossaryTermAssets,
  verifyGlossaryWorkflowReviewerCase,
  verifyTaskCreated,
  verifyWorkflowInstanceExists,
} from '../../utils/glossary';
import { sidebarClick } from '../../utils/sidebar';
import { TaskDetails } from '../../utils/task';
@ -84,6 +86,7 @@ const user1 = new UserClass();
const user2 = new UserClass();
const team = new TeamClass();
const user3 = new UserClass();
const user4 = new UserClass();

test.describe('Glossary tests', () => {
  test.beforeAll(async ({ browser }) => {
@ -91,6 +94,7 @@ test.describe('Glossary tests', () => {
    await user2.create(apiContext);
    await user1.create(apiContext);
    await user3.create(apiContext);
    await user4.create(apiContext);
    team.data.users = [user2.responseData.id];
    await team.create(apiContext);
    await afterAction();

@ -1583,28 +1587,32 @@ test.describe('Glossary tests', () => {
    await afterAction();
  });

  test('Verify Glossary Term Workflow', async ({ browser }) => {
  test('Term should stay approved when changes made by reviewer', async ({
    browser,
  }) => {
    test.slow(true);

    const { page, afterAction, apiContext } = await performAdminLogin(browser);
    const glossaryWorkflow = new Glossary();
    const glossaryTermWorkflow = new GlossaryTerm(glossaryWorkflow);
    const reviewerUser = new UserClass();
    try {
      await glossaryWorkflow.create(apiContext);
      await glossaryTermWorkflow.create(apiContext);
      await reviewerUser.create(apiContext);
    const glossary = new Glossary();
    const glossaryTerm = new GlossaryTerm(glossary);

      await glossaryWorkflow.patch(apiContext, [
    const { page, afterAction, apiContext } = await performAdminLogin(browser);
    const { page: reviewerPage, afterAction: reviewerAfterAction } =
      await performUserLogin(browser, user4);

    try {
      await glossary.create(apiContext);
      await glossaryTerm.create(apiContext);

      await glossary.patch(apiContext, [
        {
          op: 'add',
          path: '/reviewers/0',
          value: {
            id: reviewerUser.responseData.id,
            id: user4.responseData.id,
            type: 'user',
            displayName: reviewerUser.responseData.displayName,
            fullyQualifiedName: reviewerUser.responseData.fullyQualifiedName,
            name: reviewerUser.responseData.name,
            displayName: user4.responseData.displayName,
            fullyQualifiedName: user4.responseData.fullyQualifiedName,
            name: user4.responseData.name,
          },
        },
      ]);
@ -1614,11 +1622,18 @@ test.describe('Glossary tests', () => {
      async () => {
        await redirectToHomePage(page);
        await sidebarClick(page, SidebarItem.GLOSSARY);
        await selectActiveGlossary(page, glossaryWorkflow.data.displayName);
        await selectActiveGlossary(page, glossary.data.displayName);

        await verifyWorkflowInstanceExists(
          page,
          glossaryTerm.responseData.fullyQualifiedName
        );

        // Test workflow widget on hover
        const escapedFqn =
          glossaryTermWorkflow.data.fullyQualifiedName.replace(/"/g, '\\"');
        const escapedFqn = glossaryTerm.data.fullyQualifiedName.replace(
          /"/g,
          '\\"'
        );
        const statusSelector = `[data-testid="${escapedFqn}-status"]`;
        await page.hover(statusSelector);

@ -1629,21 +1644,52 @@ test.describe('Glossary tests', () => {
        await clickOutside(page);

        // Test workflow widget on term details page
        await selectActiveGlossaryTerm(
          page,
          glossaryTermWorkflow.data.displayName
        );
        await selectActiveGlossaryTerm(page, glossaryTerm.data.displayName);

        await expect(
          page.locator('[data-testid="workflow-history-widget"]')
        ).toBeVisible();
      }
    );

      await test.step('Perform Changes by reviewer', async () => {
        await redirectToHomePage(reviewerPage);
        await sidebarClick(reviewerPage, SidebarItem.GLOSSARY);
        await selectActiveGlossary(reviewerPage, glossary.data.displayName);
        await selectActiveGlossaryTerm(
          reviewerPage,
          glossaryTerm.data.displayName
        );

        await updateDescription(
          reviewerPage,
          'Demo description to be updated',
          true
        );

        await verifyGlossaryWorkflowReviewerCase(
          reviewerPage,
          glossaryTerm.responseData.fullyQualifiedName
        );

        const waitForInstanceRes = reviewerPage.waitForResponse(
          '/api/v1/governance/workflowInstanceStates/GlossaryTermApprovalWorkflow/*'
        );
        await reviewerPage.reload();
        await waitForInstanceRes;
        await reviewerPage.getByTestId('workflow-history-widget').click();

        await expect(
          reviewerPage
            .getByTestId('workflow-history-widget')
            .getByText('Auto-Approved by Reviewer')
        ).toBeVisible();
      });
    } finally {
      await glossaryWorkflow.delete(apiContext);
      await glossaryTermWorkflow.delete(apiContext);
      await reviewerUser.delete(apiContext);
      await glossary.delete(apiContext);
      await glossaryTerm.delete(apiContext);
      await afterAction();
      await reviewerAfterAction();
    }
  });

@ -1653,6 +1699,7 @@ test.describe('Glossary tests', () => {
    await user2.delete(apiContext);
    await user3.create(apiContext);
    await team.delete(apiContext);
    await user4.delete(apiContext);
    await afterAction();
  });
});

@ -528,6 +528,87 @@ export const verifyTaskCreated = async (
    .toContain(glossaryTermData);
};

export const verifyWorkflowInstanceExists = async (
  page: Page,
  glossaryTermFqn: string
) => {
  const { apiContext } = await getApiContext(page);
  const entityLink = encodeURIComponent(
    `<#E::glossaryTerm::${glossaryTermFqn}>`
  );

  await expect
    .poll(
      async () => {
        const startTs = new Date(Date.now() - 24 * 60 * 60 * 1000).getTime();
        const endTs = new Date().getTime();

        const workflowInstanceResponse = await apiContext
          .get(
            `api/v1/governance/workflowInstances?entityLink=${entityLink}&startTs=${startTs}&endTs=${endTs}&workflowName=GlossaryTermApprovalWorkflow`
          )
          .then((res) => res.json());

        return workflowInstanceResponse?.data?.length > 0;
      },
      {
        message: 'To verify workflow instance exists',
        timeout: 180_000,
        intervals: [40_000, 30_000],
      }
    )
    .toBe(true);
};

export const verifyGlossaryWorkflowReviewerCase = async (
  page: Page,
  glossaryTermFqn: string
) => {
  await page.getByTestId('workflow-history-widget').click();
  const { apiContext } = await getApiContext(page);
  const entityLink = encodeURIComponent(
    `<#E::glossaryTerm::${glossaryTermFqn}>`
  );

  await expect
    .poll(
      async () => {
        const startTs = new Date(Date.now() - 24 * 60 * 60 * 1000).getTime();
        const endTs = new Date().getTime();

        const workflowInstanceResponse = await apiContext
          .get(
            `api/v1/governance/workflowInstances?entityLink=${entityLink}&startTs=${startTs}&endTs=${endTs}&workflowName=GlossaryTermApprovalWorkflow`
          )
          .then((res) => res.json());

        if (workflowInstanceResponse?.data?.length === 0) {
          return '';
        }

        const workflowInstanceId = workflowInstanceResponse?.data[0]?.id;

        if (!workflowInstanceId) {
          return '';
        }

        const workflowInstanceState = await apiContext
          .get(
            `api/v1/governance/workflowInstanceStates/GlossaryTermApprovalWorkflow/${workflowInstanceId}?startTs=${startTs}&endTs=${endTs}`
          )
          .then((res) => res.json());

        return workflowInstanceState?.data[0]?.stage?.displayName ?? '';
      },
      {
        message: 'To verify workflow instance exists',
        timeout: 180_000,
        intervals: [40_000, 30_000],
      }
    )
    .toEqual('Auto-Approved by Reviewer');
};

export const validateGlossaryTermTask = async (
  page: Page,
  term: GlossaryTermData

@ -207,6 +207,7 @@ export const AdvanceSearchProvider = ({
    setTreeInternal(
      QbUtils.checkTree(QbUtils.loadTree(getEmptyJsonTree()), config)
    );

    setQueryFilter(undefined);
    setSQLQuery('');
  }, [config]);

@ -51,6 +51,7 @@ import {
  addEntityTypeFilter,
  getEntityTypeAggregationFilter,
  getJsonTreeFromQueryFilter,
  migrateJsonLogic,
  READONLY_SETTINGS,
} from '../../../../../../utils/QueryBuilderUtils';
import { getExplorePath } from '../../../../../../utils/RouterUtils';
@ -191,7 +192,10 @@ const QueryBuilderWidget: FC<
        debouncedFetchEntityCount(parsedValue);
      }
    } else {
      const tree = QbUtils.loadFromJsonLogic(parsedValue, config);
      // migrate existing json logic to new format
      const migratedValue = migrateJsonLogic(parsedValue);

      const tree = QbUtils.loadFromJsonLogic(migratedValue, config);
      if (tree) {
        const validatedTree = QbUtils.Validation.sanitizeTree(
          tree,

@ -234,6 +234,12 @@ export interface TriggerConfiguration {
   * Select fields that should not trigger the workflow if only them are modified.
   */
  exclude?: string[];
  /**
   * JSON Logic expression to determine if the workflow should be triggered. The expression
   * has access to: entity (current entity), changeDescription (what changed), updatedBy (user
   * who made the change), changedFields (array of field names that changed).
   */
  filter?: string;
  /**
   * Number of Entities to process at once.
   */

@ -32,6 +32,12 @@ export interface TriggerConfiguration {
   * Select fields that should not trigger the workflow if only them are modified.
   */
  exclude?: string[];
  /**
   * JSON Logic expression to determine if the workflow should be triggered. The expression
   * has access to: entity (current entity), changeDescription (what changed), updatedBy (user
   * who made the change), changedFields (array of field names that changed).
   */
  filter?: string;
}

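As an illustration of how the two trigger options combine (an editor's sketch, not part of the diff; the excluded field names are examples only and the generated `TriggerConfiguration` type is assumed to be imported):

// Hypothetical TriggerConfiguration value: `exclude` keeps the workflow from firing when only
// the listed fields change, while `filter` holds a JSON Logic expression stored as a string.
const triggerConfig: TriggerConfiguration = {
  exclude: ['tags', 'extension'], // example field names only
  // The same AND filter Test 8 above installs: reviewer update AND description present.
  // Per the test comments, the workflow is skipped when this evaluates to true.
  filter: JSON.stringify({
    and: [
      { isReviewer: { var: 'updatedBy' } },
      { '!=': [{ var: 'description' }, null] },
    ],
  }),
};
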
/**

@ -515,7 +515,7 @@ interface ElasticsearchQuery {
  };
}

interface JsonLogic {
export interface JsonLogic {
  [key: string]: any;
}

@ -865,6 +865,59 @@ export const getEntityTypeAggregationFilter = (
  return qFilter;
};

/**
 * Migrates old JsonLogic format to new format for specific entity reference fields.
 * @param jsonLogic The original JsonLogic object
 * @returns The migrated JsonLogic object
 */
export const migrateJsonLogic = (
  jsonLogic: Record<string, unknown>
): Record<string, unknown> => {
  const FIELD_MAPPING: Record<string, string> = {
    [EntityReferenceFields.OWNERS]: 'fullyQualifiedName',
    [EntityReferenceFields.REVIEWERS]: 'fullyQualifiedName',
    [EntityReferenceFields.TAG]: 'tagFqn',
  };

  const isVarObject = (value: unknown): value is { var: string } => {
    return (
      typeof value === 'object' &&
      value !== null &&
      !Array.isArray(value) &&
      'var' in value &&
      typeof (value as Record<string, unknown>)['var'] === 'string'
    );
  };

  const migrateNode = (node: JsonLogic): JsonLogic => {
    if (node === null || typeof node !== 'object') {
      return node;
    }
    if (!Array.isArray(node) && '!!' in node && isVarObject(node['!!'])) {
      const varName = node['!!'].var;
      const mappedField = FIELD_MAPPING[varName];
      if (mappedField) {
        return {
          some: [{ var: varName }, { '!=': [{ var: mappedField }, null] }],
        };
      }
    }
    // Handle arrays
    if (Array.isArray(node)) {
      return node.map(migrateNode);
    }
    // Handle objects
    const result: Record<string, JsonLogic> = {};
    for (const key in node) {
      result[key] = migrateNode(node[key] as JsonLogic);
    }

    return result;
  };

  return migrateNode(jsonLogic) as Record<string, unknown>;
}

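A quick usage sketch of this migration (not part of the diff; it assumes `EntityReferenceFields.OWNERS` resolves to the string 'owners'):

// Old format: "owners is set" expressed with a bare `!!` on the entity reference field.
const legacy = { and: [{ '!!': { var: 'owners' } }] };

// New format produced by migrateJsonLogic: a `some` over the owners, checking that at
// least one entry has a non-null fullyQualifiedName (per FIELD_MAPPING above).
const migrated = migrateJsonLogic(legacy);
// => { and: [{ some: [{ var: 'owners' }, { '!=': [{ var: 'fullyQualifiedName' }, null] }] }] }
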
export const getFieldsByKeys = (
  keys: EntityReferenceFields[],
  mapFields: Record<string, FieldOrGroup>